Mirror of https://github.com/home-assistant/supervisor.git
Synced 2025-08-26 01:19:21 +00:00

Compare commits: faster_bac...2025.02.2 (577 commits)
The author, date, and avatar columns of the commit table were not captured by the mirror; the abbreviated SHA-1 of each of the 577 commits survives and is listed here in the order given:

997a51fc42, cda6325be4, c8cc6fe003, 34939cfe52, 37bc703bbb, 5f8e41b441, 606db3585c, 4054749eb2,
ad5827d33f, 249464e928, 3bc55c054a, 4c108eea64, 9b2dbd634d, 2cb2a48184, ed5a0b511e, 1475dcb50b,
5cd7f6fd84, 52cc17fa3f, fa6949f4e4, 63a4cee770, 7aed0c1b0d, de592a6ef4, ff7086c0d0, ef0352ecd6,
7348745049, 2078044062, d254937590, 9a8e52d1fc, 6e7fac5493, 129a37a1f4, 01382e774e, 9164d35615,
58df65541c, 4c04f364a3, 7f39538231, be98e0c0f4, 9491b1ff89, 30cbb039d0, 1aabca9489, 28a87db515,
05b648629f, d1d8446480, 8b897ba537, c8f1b222c0, 257e2ceb82, 67a27cae40, 8ff9c08e82, 1b0aa30881,
2a8d2d2b48, 44bd787276, 690f1c07a7, 8e185a8413, 1f7df73964, a10afc45b1, 61a2101d8a, 088832c253,
a545b680b3, 805017eabf, b7412b0679, fff3bfd01e, 5f165a79ba, 0d3acd1aca, 463f196472, 52d5df6778,
ce75c85e65, 12fd61142d, 0073227785, 89a215cc1f, b2aece8208, 600bf91c4f, da6bdfa795, 5d4894a1ba,
d4c047bd01, 6183b9719c, f02d67ee47, bd156ebb53, b07236b544, 5928a31fc4, 3a71ea7003, 96900b1f1b,
65b39661a6, 18251ae8ae, c418e0ea76, 74b009ccd7, d2631bf398, c62358d851, e3af04701a, c2f6e319f2,
61b37877be, e72c5a037b, 578383411c, dbd37d6575, c7cf1e7593, c06fb069ab, b6c2259bd7, d0b7cc8ab3,
0f77021bcc, b44e6d8cd3, dfe9e94f87, 53ccc5249a, 5993818c16, a631dea01a, c5b85b2831, 3c1920e4e1,
ca6ae7f4ce, 031ad0dbe6, d8101ddba8, de68868788, 90590ae2de, 5e6bef7189, 7ab5555087, 02ceb713ea,
774aef74e8, 045454b597, 829193fe84, 1f893117cc, 9008009727, 3bf3bffabf, d44e995aed, 5a22599b93,
ae60e947f3, 8115fd98bc, 3201061ada, b68caecbce, 5e780293c7, 6e32144e9a, 9b52fee0a3, 7af4b17430,
4195c0fb33, 8fe1cfbb20, 623c532c9e, 3a904383af, 28299affef, 11ca772ada, 42e704d563, ec7241c0fd,
d11d59dd92, 7a55f58a5f, 0b5b5f7fd4, 56f3d384d6, 29117bb90b, 5519f6a53b, a45d507bee, 0a663b5c27,
0f1fed525c, 209cddc843, 4e0de93096, 3b6c5d5d33, 0843971e95, 12d7496cd1, ed34348c80, fefb83558a,
93a0ae4030, 5394cff296, ca3e6da943, 756a5f8836, a8e7bb670e, 687d7652a0, 9f414ee9da, 67c2f8eb83,
c033d5ce8d, fd056f3840, e3488b8a08, e1e5d3a8f2, 473662e56d, b29bc23487, 54817ef562, dd8abf738e,
55e58d39d9, ac5ce4cc9e, 2525467a2e, 81066aab83, 93f4b24e72, 9a07ff7fc4, 1a278f2590, 93472ed6dd,
dcaf2653b8, 0714d7845a, 8f2269d871, c7487e004d, 09d3edf526, 9c99bf368f, 6f196c9dea, fcac17f335,
f5a026cdd8, c6488c1ee3, f47d0d2867, 96df335b36, cc9a931baa, 95c638991d, e2ada42001, 22e50b4ace,
334484de7f, 180a7c3990, d5f33de808, 6539f0df6f, 1504278223, 9f3767b23d, e0d7985369, 2968a5717c,
e2b25fe7ce, 8601f5c49a, 42279461e0, 409447d6ca, 5b313db49d, d64618600d, 1ee01b1d5e, af590202c3,
12ca2fb624, ea95f83742, e4d4da601c, 0582f6fd39, f254af8326, 3333770246, ee5ded29ac, f530db98ff,
911f9d661f, 9935eac146, eae2c9e221, 1a67fe8a83, 3af565267b, d09460a971, c65329442a, 48430dfa28,
70e2de372d, 75784480ab, 8a70ba841d, 77733829d7, d4b67f1946, 51ab138bb1, b81413c8b2, 2ec33c6ef3,
68b2c38c7c, 1ca22799d1, 549dddcb11, 131af90469, c7c39da7c6, 8310c426f0, bb8f91e39a, a359b9a3d5,
e130ebad1f, f5b996b66c, 05e0c7c3ab, 6c1203e4bf, 5fbcaa8edd, 00d217b5f7, c0e35376f3, 2be84e1282,
08f10c96ef, 12f8ccdf02, d63e78cf34, 65d97ca924, 5770cafea9, 0177cd9528, 91a8fae9b5, f16a4ce3ef,
306f63c75b, 2a0312318d, 695a23a454, 7366673eea, 53fa0fe215, 1ba621be60, 5117364625, 986b92aee4,
12d26b05af, e6c9704505, 8ab396d77c, 8438448843, 362edb9a61, 1ff53e1853, cfd28dbb5c, cbec558289,
ca3a2937d0, 3e67fc12c5, f6faa18409, 21ae2c2e54, eb3986bea2, 5d6738ced8, 2f2fecddf2, 218ba3601e,
4c3f60c44b, cb85e5e464, 5b46235872, 70f675ac82, bf0c714ea4, c95df56e8d, 5f3d851954, 10c69dcdae,
bdd81ce3a9, 17ee234be4, 61034dfa7b, 185cd362fb, e2ca357774, 3dea7fc4e8, 01ba591bc9, 640b7d46e3,
d6560c51ee, 3e9b1938c6, 44ce8de71f, 0bbd15bfda, 591b9a4d87, 5ee7d16687, 4ab4350c58, 4ea7133fa8,
627d67f9d0, eb37655598, 19b62dd0d4, b2ad1ceea3, c1545b5b78, 2c2f04ba85, 77e7bf51b7, a42d71dcef,
1ff0432f4d, 54afd6e1c8, 458c493a74, 8ac8ecb17e, eac167067e, aa7f4aafeb, d2183fa12b, 928f32bb4f,
cbe21303c4, 94987c04b8, d4ba46a846, 1a22d83895, 6b73bf5c28, c9c9451c36, 1882d448ea, 2f11c9c9e3,
02bdc4b555, 1a1ee50d9d, 50dc09d1a9, 130efd340c, 00bc13c049, 3caad67f61, 13783f0d4a, eae97ba3f4,
134dad7357, 1c4d2e8dec, f2d7be3aac, d06edb2dd6, 7fa15b334a, ffb4e2d6d7, bd8047ae9c, 49bc0624af,
5e1d764eb3, 0064d93d75, 5a838ecfe7, c37b5effd7, ca7f3e8acb, b0cdb91d5e, 4829eb8ae1, 1bb814b793,
918fcb7d62, bbfd899564, 12c4d9da87, 6b4fd9b6b8, 07c22f4a60, 252e1e2ac0, b684c8673e, 547f42439d,
c51ceb000f, 4cbede1bc8, 5eac8c7780, ab78d87304, 09166e3867, 8a5c813cdd, 4200622f43, c4452a85b4,
e57de4a3c1, 9fd2c91c55, fbd70013a8, 8d18f3e66e, 5f5754e860, 974c882b9a, a9ea90096b, 45c72c426e,
4e5b75fe19, 3cd617e68f, ddff02f73b, b59347b3d3, 1dc769076f, f150a19c0f, c4bc1e3824, eca99b69db,
043af72847, 05c7b6c639, 3385c99f1f, 895117f857, 9e3135e2de, 9a1c517437, c0c0c4b7ad, be6e39fed0,
b384921ee0, 0d05a6eae3, 430aef68c6, eac6070e12, 6693b7c2e6, 7898c3e433, 420ecd064e, 4289be53f8,
29b41b564e, 998eb69583, 8ebc097ff4, c05984ca49, 1a700c3013, a9c92cdec8, da8b938d5b, 71e91328f1,
6356be4c52, e26e5440b6, fecfbd1a3e, c00d6dfc76, 85be66d90d, 1ac506b391, f7738b77de, 824037bb7d,
221292ad14, 16f8c75e9f, 90a37079f1, 798092af5e, 2a622a929d, ca8eeaa68c, d1b8ac1249, 3f629c4d60,
3fa910e68b, e3cf2989c9, 136b2f402d, 8d18d2d9c6, f18213361a, 18d9d32bca, 1246e429c9, 77bc46bc37,
ce16963c94, a70e8cfe58, ba922a1aaa, b09230a884, f1cb9ca08e, 06513e88c6, b4a79bd068, dfd8fe84e0,
4857c2e243, 7d384f6160, 672a7621f9, f0e2fb3f57, 8c3a520512, 22e50d56db, a0735f3585, 50a2e8fde3,
55ed63cc79, 97e9dfff3f, 501c9579fb, f9aedadee6, c3c17b2bc3, a894c4589e, 56a8a1b5a1, be3f7a6c37,
906e400ab7, a9265afd4c, d26058ac80, ebd1f30606, c78e077649, 07619223b0, 25c326ec6c, df167b94c2,
3730908881, 975dc1bc11, 31409f0c32, b19273227b, f89179fb03, 90c971f9f1, d685780a4a, b6bc8b7b7c,
92daba898f, 138843591e, 0814552b2a, 0e0fadd72d, 5426bd4392, 3520a65099, b15a5c2c87, a8af04ff82,
2148de45a0, c4143dacee, a8025e77b3, dd1e76be93, 36f997959a, c1faed163a, 9ca927dbe7, 02c6011818,
2e96b16396, 53b8de6c1c, daea9f893c, d1b5b1734c, 74a5899626, 202ebf6d4e, 2c7b417e25, bb5e138134,
3a2c3e2f84, d5be0c34ac, ea5431ef2b, 9c4cdcd11f, e5ef6333e4, 98779a48b1, 9d4848ee77, 5126820619,
8b5c808e8c, 9c75996c40, d524778e42, 52d4bc660e, 8884696a6c, d493ccde28, 1ececaaaa2, 91b48ad432,
f3fe40a19f, cf4b29c425, 4344e14a9d, df935ec423, e7f9f7504e, 5721b2353a, c9de846d0e, a598108c26,
5467aa399d, da052b074a, 90c035edd0, fc4eb44a24, a71111b378, 52e0c7e484, e32970f191, 897cc36017,
d79c575860, 1f19f84edd, 27c37b8b84, 06a5dd3153, b5bf270d22, 8e71d69a64, 06edb6f8a8, dca82ec0a1,
9c82ce4103, 8a23a9eb1b, e1b7e515df, c8ff335ed7, 5736da8ab7, 060bba4dce, 4c573991d2, 7fd6dce55f,
1861d756e9, c36c041f5e, c3d877bdd2, 1242030d4a, 1626e74608, b1b913777f, 190894010c, 765265723c,
7e20502379, 366fc30e9d, aa91788a69, 375789b019, 140b769a42, 88d718271d, 6ed26cdd1f, d1851fa607,
e846157c52, e190bb4c1a, 137fbe7acd, 9ccdb2ae3a, f5f7515744, ddadbec7e3, d24543e103, f80c4c9565,
480b383782, d3efd4c24b, 67a0acffa2, 41b07da399, a6ce55d5b5, 98c01fe1b3, 51df986222, 9c625f93a5,
7101d47e2e
(file name not captured in the mirror; the hunk below edits the dev container configuration)

@@ -1,38 +1,51 @@
 {
   "name": "Supervisor dev",
-  "image": "ghcr.io/home-assistant/devcontainer:supervisor",
+  "image": "ghcr.io/home-assistant/devcontainer:2-supervisor",
   "containerEnv": {
     "WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
   },
+  "remoteEnv": {
+    "PATH": "${containerEnv:VIRTUAL_ENV}/bin:${containerEnv:PATH}"
+  },
   "appPort": ["9123:8123", "7357:4357"],
-  "postCreateCommand": "bash devcontainer_bootstrap",
+  "postCreateCommand": "bash devcontainer_setup",
+  "postStartCommand": "bash devcontainer_bootstrap",
   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
   "customizations": {
     "vscode": {
       "extensions": [
-        "ms-python.python",
+        "charliermarsh.ruff",
         "ms-python.pylint",
         "ms-python.vscode-pylance",
         "visualstudioexptteam.vscodeintellicode",
-        "esbenp.prettier-vscode"
+        "redhat.vscode-yaml",
+        "esbenp.prettier-vscode",
+        "GitHub.vscode-pull-request-github"
       ],
       "settings": {
+        "python.defaultInterpreterPath": "/home/vscode/.local/ha-venv/bin/python",
+        "python.pythonPath": "/home/vscode/.local/ha-venv/bin/python",
+        "python.terminal.activateEnvInCurrentTerminal": true,
+        "python.testing.pytestArgs": ["--no-cov"],
+        "pylint.importStrategy": "fromEnvironment",
+        "editor.formatOnPaste": false,
+        "editor.formatOnSave": true,
+        "editor.formatOnType": true,
+        "files.trimTrailingWhitespace": true,
         "terminal.integrated.profiles.linux": {
           "zsh": {
             "path": "/usr/bin/zsh"
           }
         },
         "terminal.integrated.defaultProfile.linux": "zsh",
-        "editor.formatOnPaste": false,
-        "editor.formatOnSave": true,
-        "editor.formatOnType": true,
-        "files.trimTrailingWhitespace": true,
-        "python.pythonPath": "/usr/local/bin/python3",
-        "python.formatting.provider": "black",
-        "python.formatting.blackArgs": ["--target-version", "py312"],
-        "python.formatting.blackPath": "/usr/local/bin/black"
+        "[python]": {
+          "editor.defaultFormatter": "charliermarsh.ruff"
+        }
       }
     }
   },
-  "mounts": ["type=volume,target=/var/lib/docker"]
+  "mounts": [
+    "type=volume,target=/var/lib/docker",
+    "type=volume,target=/mnt/supervisor"
+  ]
 }
.github/PULL_REQUEST_TEMPLATE.md (vendored, 9 changed lines)

@@ -38,6 +38,7 @@
 - This PR is related to issue:
 - Link to documentation pull request:
 - Link to cli pull request:
+- Link to client library pull request:

 ## Checklist

@@ -52,12 +53,14 @@
 - [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
 - [ ] There is no commented out code in this PR.
 - [ ] I have followed the [development checklist][dev-checklist]
-- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
+- [ ] The code has been formatted using Ruff (`ruff format supervisor tests`)
 - [ ] Tests have been added to verify that the new code works.

-If API endpoints of add-on configuration are added/changed:
+If API endpoints or add-on configuration are added/changed:

 - [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]
+- [ ] [CLI][cli-repository] updated (if necessary)
+- [ ] [Client library][client-library-repository] updated (if necessary)

 <!--
 Thank you for contributing <3

@@ -67,3 +70,5 @@ If API endpoints of add-on configuration are added/changed:

 [dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
 [docs-repository]: https://github.com/home-assistant/developers.home-assistant
+[cli-repository]: https://github.com/home-assistant/cli
+[client-library-repository]: https://github.com/home-assistant-libs/python-supervisor-client/
.github/workflows/builder.yml (vendored, 26 changed lines)

@@ -33,7 +33,7 @@ on:
       - setup.py

 env:
-  DEFAULT_PYTHON: "3.12"
+  DEFAULT_PYTHON: "3.13"
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor

@@ -53,7 +53,7 @@ jobs:
       requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2
         with:
           fetch-depth: 0

@@ -92,7 +92,7 @@ jobs:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2
         with:
           fetch-depth: 0

@@ -106,7 +106,7 @@ jobs:

       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2024.01.0
+        uses: home-assistant/wheels@2024.11.0
         with:
           abi: cp312
           tag: musllinux_1_2

@@ -125,20 +125,20 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Install Cosign
         if: needs.init.outputs.publish == 'true'
-        uses: sigstore/cosign-installer@v3.3.0
+        uses: sigstore/cosign-installer@v3.8.0
         with:
-          cosign-release: "v2.0.2"
+          cosign-release: "v2.4.0"

       - name: Install dirhash and calc hash
         if: needs.init.outputs.publish == 'true'
         run: |
-          pip3 install dirhash
+          pip3 install setuptools dirhash
           dir_hash="$(dirhash "${{ github.workspace }}/supervisor" -a sha256 --match "*.py")"
           echo "${dir_hash}" > rootfs/supervisor.sha256

@@ -149,7 +149,7 @@ jobs:

       - name: Login to GitHub Container Registry
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v3.0.0
+        uses: docker/login-action@v3.3.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}

@@ -160,7 +160,7 @@ jobs:
         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV

       - name: Build supervisor
-        uses: home-assistant/builder@2024.01.0
+        uses: home-assistant/builder@2024.08.2
         with:
           args: |
             $BUILD_ARGS \

@@ -178,7 +178,7 @@ jobs:
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2

       - name: Initialize git
         if: needs.init.outputs.publish == 'true'

@@ -203,11 +203,11 @@ jobs:
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2

       - name: Build the Supervisor
         if: needs.init.outputs.publish != 'true'
-        uses: home-assistant/builder@2024.01.0
+        uses: home-assistant/builder@2024.08.2
         with:
           args: |
             --test \
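The only functional change in the hash step above is adding `setuptools` to the `dirhash` install, presumably because `dirhash` needs `pkg_resources` at runtime and newer Python environments no longer ship it implicitly. For reference, a minimal Python sketch of the digest that step publishes, assuming the PyPI `dirhash` package the workflow installs:

```python
# A minimal sketch of the digest computed by the "Install dirhash and calc
# hash" step, assuming the PyPI `dirhash` package. The workflow's CLI call is:
#   dirhash supervisor -a sha256 --match "*.py"
from dirhash import dirhash

# SHA-256 over all *.py files under supervisor/, as in the workflow.
digest = dirhash("supervisor", "sha256", match=["*.py"])
print(digest)  # the workflow writes this value to rootfs/supervisor.sha256
```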
.github/workflows/ci.yaml (vendored, 310 changed lines)

@@ -8,7 +8,7 @@ on:
   pull_request: ~

 env:
-  DEFAULT_PYTHON: "3.12"
+  DEFAULT_PYTHON: "3.13"
   PRE_COMMIT_CACHE: ~/.cache/pre-commit

 concurrency:

@@ -25,15 +25,15 @@ jobs:
     name: Prepare Python dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python
         id: python
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v5.4.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.3
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |

@@ -47,7 +47,7 @@ jobs:
           pip install -r requirements.txt -r requirements_tests.txt
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.3
+        uses: actions/cache@v4.2.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true

@@ -61,21 +61,21 @@ jobs:
           . venv/bin/activate
           pre-commit install-hooks

-  lint-black:
-    name: Check black
+  lint-ruff-format:
+    name: Check ruff-format
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.3
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |

@@ -85,10 +85,67 @@ jobs:
         run: |
           echo "Failed to restore Python virtual environment from cache"
           exit 1
-      - name: Run black
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.2.1
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff-format
         run: |
           . venv/bin/activate
-          black --target-version py312 --check supervisor tests setup.py
+          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github
+
+  lint-ruff:
+    name: Check ruff
+    runs-on: ubuntu-latest
+    needs: prepare
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.2.2
+      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
+        uses: actions/setup-python@v5.4.0
+        id: python
+        with:
+          python-version: ${{ needs.prepare.outputs.python-version }}
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v4.2.1
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Restore pre-commit environment from cache
+        id: cache-precommit
+        uses: actions/cache@v4.2.1
+        with:
+          path: ${{ env.PRE_COMMIT_CACHE }}
+          key: |
+            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
+      - name: Fail job if cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Run ruff
+        run: |
+          . venv/bin/activate
+          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
+        env:
+          RUFF_OUTPUT_FORMAT: github

   lint-dockerfile:
     name: Check Dockerfile

@@ -96,7 +153,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"

@@ -111,15 +168,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.3
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |

@@ -131,7 +188,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.3
+        uses: actions/cache@v4.2.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |

@@ -149,94 +206,21 @@ jobs:
           . venv/bin/activate
           pre-commit run --hook-stage manual check-executables-have-shebangs --all-files

-  lint-flake8:
-    name: Check flake8
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.3
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Register flake8 problem matcher
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/flake8.json"
-      - name: Run flake8
-        run: |
-          . venv/bin/activate
-          flake8 supervisor tests
-
-  lint-isort:
-    name: Check isort
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.3
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v3.3.3
-        with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run isort
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
-
   lint-json:
     name: Check JSON
     runs-on: ubuntu-latest
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.3
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |

@@ -248,7 +232,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v3.3.3
+        uses: actions/cache@v4.2.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |

@@ -272,92 +256,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.3
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Register pylint problem matcher
-        run: |
-          echo "::add-matcher::.github/workflows/matchers/pylint.json"
-      - name: Run pylint
-        run: |
-          . venv/bin/activate
-          pylint supervisor tests
-
-  lint-pyupgrade:
-    name: Check pyupgrade
-    runs-on: ubuntu-latest
-    needs: prepare
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.3
-        with:
-          path: venv
-          key: |
-            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-      - name: Fail job if Python cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Restore pre-commit environment from cache
-        id: cache-precommit
-        uses: actions/cache@v3.3.3
-        with:
-          path: ${{ env.PRE_COMMIT_CACHE }}
-          key: |
-            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
-      - name: Fail job if cache restore failed
-        if: steps.cache-venv.outputs.cache-hit != 'true'
-        run: |
-          echo "Failed to restore Python virtual environment from cache"
-          exit 1
-      - name: Run pyupgrade
-        run: |
-          . venv/bin/activate
-          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
-
-  pytest:
-    runs-on: ubuntu-latest
-    needs: prepare
-    name: Run tests Python ${{ needs.prepare.outputs.python-version }}
-    steps:
-      - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
-      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
-        id: python
-        with:
-          python-version: ${{ needs.prepare.outputs.python-version }}
-      - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.3.0
-        with:
-          cosign-release: "v2.0.2"
-      - name: Restore Python virtual environment
-        id: cache-venv
-        uses: actions/cache@v3.3.3
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |

@@ -370,7 +277,47 @@ jobs:
       - name: Install additional system dependencies
         run: |
           sudo apt-get update
-          sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus dbus-x11
+          sudo apt-get install -y --no-install-recommends libpulse0
+      - name: Register pylint problem matcher
+        run: |
+          echo "::add-matcher::.github/workflows/matchers/pylint.json"
+      - name: Run pylint
+        run: |
+          . venv/bin/activate
+          pylint supervisor tests
+
+  pytest:
+    runs-on: ubuntu-latest
+    needs: prepare
+    name: Run tests Python ${{ needs.prepare.outputs.python-version }}
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@v4.2.2
+      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
+        uses: actions/setup-python@v5.4.0
+        id: python
+        with:
+          python-version: ${{ needs.prepare.outputs.python-version }}
+      - name: Install Cosign
+        uses: sigstore/cosign-installer@v3.8.0
+        with:
+          cosign-release: "v2.4.0"
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@v4.2.1
+        with:
+          path: venv
+          key: |
+            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Install additional system dependencies
+        run: |
+          sudo apt-get update
+          sudo apt-get install -y --no-install-recommends libpulse0 libudev1 dbus-daemon
       - name: Register Python problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/python.json"

@@ -392,10 +339,11 @@ jobs:
             -o console_output_style=count \
             tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@v4.0.0
+        uses: actions/upload-artifact@v4.6.0
         with:
           name: coverage-${{ matrix.python-version }}
           path: .coverage
+          include-hidden-files: true

   coverage:
     name: Process test coverage

@@ -403,15 +351,15 @@ jobs:
     needs: ["pytest", "prepare"]
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.0.0
+        uses: actions/setup-python@v5.4.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v3.3.3
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: |

@@ -422,7 +370,7 @@ jobs:
           echo "Failed to restore Python virtual environment from cache"
           exit 1
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.1
+        uses: actions/download-artifact@v4.1.8
       - name: Combine coverage results
         run: |
           . venv/bin/activate

@@ -430,4 +378,4 @@ jobs:
           coverage report
           coverage xml
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v3.1.4
+        uses: codecov/codecov-action@v5.3.1
.github/workflows/matchers/flake8.json (vendored, deleted — 30 lines)

@@ -1,30 +0,0 @@
-{
-  "problemMatcher": [
-    {
-      "owner": "flake8-error",
-      "severity": "error",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    },
-    {
-      "owner": "flake8-warning",
-      "severity": "warning",
-      "pattern": [
-        {
-          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
-          "file": 1,
-          "line": 2,
-          "column": 3,
-          "message": 4
-        }
-      ]
-    }
-  ]
-}
.github/workflows/release-drafter.yml (vendored, 4 changed lines)

@@ -11,7 +11,7 @@ jobs:
     name: Release Drafter
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2
         with:
           fetch-depth: 0

@@ -36,7 +36,7 @@ jobs:
           echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"

       - name: Run Release Drafter
-        uses: release-drafter/release-drafter@v5.25.0
+        uses: release-drafter/release-drafter@v6.1.0
         with:
           tag: ${{ steps.version.outputs.version }}
           name: ${{ steps.version.outputs.version }}
.github/workflows/sentry.yaml (vendored, 4 changed lines)

@@ -10,9 +10,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.1.1
+        uses: actions/checkout@v4.2.2
       - name: Sentry Release
-        uses: getsentry/action-release@v1.6.0
+        uses: getsentry/action-release@v1.10.4
         env:
           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
           SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
.github/workflows/stale.yml (vendored, 2 changed lines)

@@ -9,7 +9,7 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v9.0.0
+      - uses: actions/stale@v9.1.0
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 30
.github/workflows/update_frontend.yml (vendored, new file — 74 lines)

@@ -0,0 +1,74 @@
+name: Update frontend
+
+on:
+  schedule: # once a day
+    - cron: "0 0 * * *"
+  workflow_dispatch:
+
+jobs:
+  check-version:
+    runs-on: ubuntu-latest
+    outputs:
+      skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
+      latest_tag: ${{ steps.latest_frontend_version.outputs.latest_tag }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - name: Get latest frontend release
+        id: latest_frontend_version
+        uses: abatilo/release-info-action@v1.3.3
+        with:
+          owner: home-assistant
+          repo: frontend
+      - name: Check if version is up to date
+        id: check_version
+        run: |
+          SUPERVISOR_VERSION=$(cat .ha-frontend-version)
+          LATEST_VERSION=${{ steps.latest_frontend_version.outputs.latest_tag }}
+          echo "SUPERVISOR_VERSION=$SUPERVISOR_VERSION" >> $GITHUB_ENV
+          echo "LATEST_VERSION=$LATEST_VERSION" >> $GITHUB_ENV
+          if [[ ! "$SUPERVISOR_VERSION" < "$LATEST_VERSION" ]]; then
+            echo "Frontend version is up to date"
+            echo "skip=true" >> $GITHUB_OUTPUT
+          fi
+      - name: Check if there is no open PR with this version
+        if: steps.check_version.outputs.skip != 'true'
+        id: check_existing_pr
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: |
+          PR=$(gh pr list --state open --base main --json title --search "Autoupdate frontend to version $LATEST_VERSION")
+          if [[ "$PR" != "[]" ]]; then
+            echo "Skipping - There is already a PR open for version $LATEST_VERSION"
+            echo "skip=true" >> $GITHUB_OUTPUT
+          fi
+  create-pr:
+    runs-on: ubuntu-latest
+    needs: check-version
+    if: needs.check-version.outputs.skip != 'true'
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+      - name: Clear www folder
+        run: |
+          rm -rf supervisor/api/panel/*
+      - name: Update version file
+        run: |
+          echo "${{ needs.check-version.outputs.latest_tag }}" > .ha-frontend-version
+      - name: Download release assets
+        uses: robinraju/release-downloader@v1
+        with:
+          repository: 'home-assistant/frontend'
+          tag: ${{ needs.check-version.outputs.latest_tag }}
+          fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_tag }}.tar.gz
+          extract: true
+          out-file-path: supervisor/api/panel/
+      - name: Create PR
+        uses: peter-evans/create-pull-request@v7
+        with:
+          commit-message: "Autoupdate frontend to version ${{ needs.check-version.outputs.latest_tag }}"
+          branch: autoupdate-frontend
+          base: main
+          draft: true
+          sign-commits: true
+          title: "Autoupdate frontend to version ${{ needs.check-version.outputs.latest_tag }}"
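The `check_version` step above compares `.ha-frontend-version` against the latest release tag with bash's plain string comparison (`[[ "$A" < "$B" ]]`), which tracks chronological order for the zero-padded `YYYYMMDD` date part of frontend tags. A small Python sketch of the same check, with hypothetical version values:

```python
# Illustration of the workflow's up-to-date check. Bash's [[ "$A" < "$B" ]]
# is a lexicographic comparison; it matches date order for the zero-padded
# YYYYMMDD portion of frontend tags. The values below are hypothetical.
current = "20250107.0"  # e.g. the contents of .ha-frontend-version
latest = "20250205.0"   # e.g. the latest home-assistant/frontend release tag

# Mirrors: if [[ ! "$SUPERVISOR_VERSION" < "$LATEST_VERSION" ]]; then skip
skip = not (current < latest)
print(skip)  # False -> a newer frontend exists, so the create-pr job runs
```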
.gitmodules (vendored, deleted — 4 lines)

@@ -1,4 +0,0 @@
-[submodule "home-assistant-polymer"]
-    path = home-assistant-polymer
-    url = https://github.com/home-assistant/home-assistant-polymer
-    branch = dev
.ha-frontend-version (new file — 1 line)

@@ -0,0 +1 @@
+20250205.0
(file name not captured in the mirror; the hunk below edits the pre-commit configuration)

@@ -1,34 +1,15 @@
 repos:
-  - repo: https://github.com/psf/black
-    rev: 23.12.1
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.9.1
     hooks:
-      - id: black
+      - id: ruff
         args:
-          - --safe
-          - --quiet
-          - --target-version
-          - py312
+          - --fix
+      - id: ruff-format
         files: ^((supervisor|tests)/.+)?[^/]+\.py$
-  - repo: https://github.com/PyCQA/flake8
-    rev: 7.0.0
-    hooks:
-      - id: flake8
-        additional_dependencies:
-          - flake8-docstrings==1.7.0
-          - pydocstyle==6.3.0
-        files: ^(supervisor|script|tests)/.+\.py$
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v5.0.0
     hooks:
       - id: check-executables-have-shebangs
         stages: [manual]
       - id: check-json
-  - repo: https://github.com/PyCQA/isort
-    rev: 5.13.2
-    hooks:
-      - id: isort
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.15.0
-    hooks:
-      - id: pyupgrade
-        args: [--py312-plus]
.vscode/tasks.json (vendored, 18 changed lines)

@@ -58,9 +58,23 @@
       "problemMatcher": []
     },
     {
-      "label": "Flake8",
+      "label": "Ruff Check",
       "type": "shell",
-      "command": "flake8 supervisor tests",
+      "command": "ruff check --fix supervisor tests",
+      "group": {
+        "kind": "test",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
+    {
+      "label": "Ruff Format",
+      "type": "shell",
+      "command": "ruff format supervisor tests",
       "group": {
         "kind": "test",
         "isDefault": true
Dockerfile (17 changed lines)

@@ -4,7 +4,8 @@ FROM ${BUILD_FROM}
 ENV \
     S6_SERVICES_GRACETIME=10000 \
     SUPERVISOR_API=http://localhost \
-    CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1
+    CRYPTOGRAPHY_OPENSSL_NO_LEGACY=1 \
+    UV_SYSTEM_PYTHON=true

 ARG \
     COSIGN_VERSION \

@@ -26,20 +27,24 @@ RUN \
         yaml \
     \
     && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
-    && chmod a+x /usr/bin/cosign
+    && chmod a+x /usr/bin/cosign \
+    && pip3 install uv==0.6.1

 # Install requirements
 COPY requirements.txt .
 RUN \
-    export MAKEFLAGS="-j$(nproc)" \
-    && pip3 install --only-binary=:all: \
-        -r ./requirements.txt \
+    if [ "${BUILD_ARCH}" = "i386" ]; then \
+        setarch="linux32"; \
+    else \
+        setarch=""; \
+    fi \
+    && ${setarch} uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt \
     && rm -f requirements.txt

 # Install Home Assistant Supervisor
 COPY . supervisor
 RUN \
-    pip3 install -e ./supervisor \
+    uv pip install --no-cache -e ./supervisor \
     && python3 -m compileall ./supervisor/supervisor
(file name not captured in the mirror; the hunk below appends to the release-channel documentation, and the image of the added badge link was not captured)

@@ -30,3 +30,5 @@ Releases are done in 3 stages (channels) with this structure:

 [development]: https://developers.home-assistant.io/docs/supervisor/development
 [stable]: https://github.com/home-assistant/version/blob/master/stable.json
+
+[](https://www.openhomefoundation.org/)
build.yaml (12 changed lines)

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-hassio-supervisor
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.18
-  armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.18
-  armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.18
-  amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.18
-  i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.18
+  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.13-alpine3.21
+  armhf: ghcr.io/home-assistant/armhf-base-python:3.13-alpine3.21
+  armv7: ghcr.io/home-assistant/armv7-base-python:3.13-alpine3.21
+  amd64: ghcr.io/home-assistant/amd64-base-python:3.13-alpine3.21
+  i386: ghcr.io/home-assistant/i386-base-python:3.13-alpine3.21
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io

@@ -12,7 +12,7 @@ cosign:
   base_identity: https://github.com/home-assistant/docker-base/.*
   identity: https://github.com/home-assistant/supervisor/.*
 args:
-  COSIGN_VERSION: 2.0.2
+  COSIGN_VERSION: 2.4.0
 labels:
   io.hass.type: supervisor
   org.opencontainers.image.title: Home Assistant Supervisor
Submodule home-assistant-polymer deleted from 9d457d52e8
296  pyproject.toml
@@ -1,5 +1,5 @@
 [build-system]
-requires = ["setuptools~=68.0.0", "wheel~=0.40.0"]
+requires = ["setuptools~=75.8.0", "wheel~=0.45.0"]
 build-backend = "setuptools.build_meta"

 [project]
@@ -12,7 +12,7 @@ authors = [
     { name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
 ]
 keywords = ["docker", "home-assistant", "api"]
-requires-python = ">=3.12.0"
+requires-python = ">=3.13.0"

 [project.urls]
 "Homepage" = "https://www.home-assistant.io/"
@@ -31,7 +31,7 @@ include-package-data = true
 include = ["supervisor*"]

 [tool.pylint.MAIN]
-py-version = "3.11"
+py-version = "3.13"
 # Use a conservative default here; 2 should speed up most setups and not hurt
 # any too bad. Override on command line as appropriate.
 jobs = 2
@@ -44,7 +44,7 @@ good-names = ["id", "i", "j", "k", "ex", "Run", "_", "fp", "T", "os"]

 [tool.pylint."MESSAGES CONTROL"]
 # Reasons disabled:
-# format - handled by black
+# format - handled by ruff
 # abstract-method - with intro of async there are always methods missing
 # cyclic-import - doesn't test if both import on load
 # duplicate-code - unavoidable
@@ -71,6 +71,136 @@ disable = [
     "too-many-statements",
     "unused-argument",
     "consider-using-with",
+
+    # Handled by ruff
+    # Ref: <https://github.com/astral-sh/ruff/issues/970>
+    "await-outside-async", # PLE1142
+    "bad-str-strip-call", # PLE1310
+    "bad-string-format-type", # PLE1307
+    "bidirectional-unicode", # PLE2502
+    "continue-in-finally", # PLE0116
+    "duplicate-bases", # PLE0241
+    "format-needs-mapping", # F502
+    "function-redefined", # F811
+    # Needed because ruff does not understand type of __all__ generated by a function
+    # "invalid-all-format", # PLE0605
+    "invalid-all-object", # PLE0604
+    "invalid-character-backspace", # PLE2510
+    "invalid-character-esc", # PLE2513
+    "invalid-character-nul", # PLE2514
+    "invalid-character-sub", # PLE2512
+    "invalid-character-zero-width-space", # PLE2515
+    "logging-too-few-args", # PLE1206
+    "logging-too-many-args", # PLE1205
+    "missing-format-string-key", # F524
+    "mixed-format-string", # F506
+    "no-method-argument", # N805
+    "no-self-argument", # N805
+    "nonexistent-operator", # B002
+    "nonlocal-without-binding", # PLE0117
+    "not-in-loop", # F701, F702
+    "notimplemented-raised", # F901
+    "return-in-init", # PLE0101
+    "return-outside-function", # F706
+    "syntax-error", # E999
+    "too-few-format-args", # F524
+    "too-many-format-args", # F522
+    "too-many-star-expressions", # F622
+    "truncated-format-string", # F501
+    "undefined-all-variable", # F822
+    "undefined-variable", # F821
+    "used-prior-global-declaration", # PLE0118
+    "yield-inside-async-function", # PLE1700
+    "yield-outside-function", # F704
+    "anomalous-backslash-in-string", # W605
+    "assert-on-string-literal", # PLW0129
+    "assert-on-tuple", # F631
+    "bad-format-string", # W1302, F
+    "bad-format-string-key", # W1300, F
+    "bare-except", # E722
+    "binary-op-exception", # PLW0711
+    "cell-var-from-loop", # B023
+    # "dangerous-default-value", # B006, ruff catches new occurrences, needs more work
+    "duplicate-except", # B014
+    "duplicate-key", # F601
+    "duplicate-string-formatting-argument", # F
+    "duplicate-value", # F
+    "eval-used", # PGH001
+    "exec-used", # S102
+    # "expression-not-assigned", # B018, ruff catches new occurrences, needs more work
+    "f-string-without-interpolation", # F541
+    "forgotten-debug-statement", # T100
+    "format-string-without-interpolation", # F
+    # "global-statement", # PLW0603, ruff catches new occurrences, needs more work
+    "global-variable-not-assigned", # PLW0602
+    "implicit-str-concat", # ISC001
+    "import-self", # PLW0406
+    "inconsistent-quotes", # Q000
+    "invalid-envvar-default", # PLW1508
+    "keyword-arg-before-vararg", # B026
+    "logging-format-interpolation", # G
+    "logging-fstring-interpolation", # G
+    "logging-not-lazy", # G
+    "misplaced-future", # F404
+    "named-expr-without-context", # PLW0131
+    "nested-min-max", # PLW3301
+    # "pointless-statement", # B018, ruff catches new occurrences, needs more work
+    "raise-missing-from", # TRY200
+    # "redefined-builtin", # A001, ruff is way more stricter, needs work
+    "try-except-raise", # TRY203
+    "unused-argument", # ARG001, we don't use it
+    "unused-format-string-argument", #F507
+    "unused-format-string-key", # F504
+    "unused-import", # F401
+    "unused-variable", # F841
+    "useless-else-on-loop", # PLW0120
+    "wildcard-import", # F403
+    "bad-classmethod-argument", # N804
+    "consider-iterating-dictionary", # SIM118
+    "empty-docstring", # D419
+    "invalid-name", # N815
+    "line-too-long", # E501, disabled globally
+    "missing-class-docstring", # D101
+    "missing-final-newline", # W292
+    "missing-function-docstring", # D103
+    "missing-module-docstring", # D100
+    "multiple-imports", #E401
+    "singleton-comparison", # E711, E712
+    "subprocess-run-check", # PLW1510
+    "superfluous-parens", # UP034
+    "ungrouped-imports", # I001
+    "unidiomatic-typecheck", # E721
+    "unnecessary-direct-lambda-call", # PLC3002
+    "unnecessary-lambda-assignment", # PLC3001
+    "unneeded-not", # SIM208
+    "useless-import-alias", # PLC0414
+    "wrong-import-order", # I001
+    "wrong-import-position", # E402
+    "comparison-of-constants", # PLR0133
+    "comparison-with-itself", # PLR0124
+    # "consider-alternative-union-syntax", # UP007, typing extension
+    "consider-merging-isinstance", # PLR1701
+    # "consider-using-alias", # UP006, typing extension
+    "consider-using-dict-comprehension", # C402
+    "consider-using-generator", # C417
+    "consider-using-get", # SIM401
+    "consider-using-set-comprehension", # C401
+    "consider-using-sys-exit", # PLR1722
+    "consider-using-ternary", # SIM108
+    "literal-comparison", # F632
+    "property-with-parameters", # PLR0206
+    "super-with-arguments", # UP008
+    "too-many-branches", # PLR0912
+    "too-many-return-statements", # PLR0911
+    "too-many-statements", # PLR0915
+    "trailing-comma-tuple", # COM818
+    "unnecessary-comprehension", # C416
+    "use-a-generator", # C417
+    "use-dict-literal", # C406
+    "use-list-literal", # C405
+    "useless-object-inheritance", # UP004
+    "useless-return", # PLR1711
+    # "no-self-use", # PLR6301 # Optional plugin, not enabled
 ]

 [tool.pylint.REPORTS]
@@ -85,11 +215,15 @@ expected-line-ending-format = "LF"
 [tool.pylint.EXCEPTIONS]
 overgeneral-exceptions = ["builtins.BaseException", "builtins.Exception"]

+[tool.pylint.DESIGN]
+max-positional-arguments = 10
+
 [tool.pytest.ini_options]
 testpaths = ["tests"]
 norecursedirs = [".git"]
 log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
 log_date_format = "%Y-%m-%d %H:%M:%S"
+asyncio_default_fixture_loop_scope = "function"
 asyncio_mode = "auto"
 filterwarnings = [
     "error",
@@ -97,16 +231,144 @@ filterwarnings = [
     "ignore::pytest.PytestUnraisableExceptionWarning",
 ]

-[tool.isort]
-multi_line_output = 3
-include_trailing_comma = true
-force_grid_wrap = 0
-line_length = 88
-indent = "    "
-force_sort_within_sections = true
-sections = ["FUTURE", "STDLIB", "THIRDPARTY", "FIRSTPARTY", "LOCALFOLDER"]
-default_section = "THIRDPARTY"
-forced_separate = "tests"
-combine_as_imports = true
-use_parentheses = true
-known_first_party = ["supervisor", "tests"]
+[tool.ruff]
+lint.select = [
+    "B002", # Python does not support the unary prefix increment
+    "B007", # Loop control variable {name} not used within loop body
+    "B014", # Exception handler with duplicate exception
+    "B023", # Function definition does not bind loop variable {name}
+    "B026", # Star-arg unpacking after a keyword argument is strongly discouraged
+    "B904", # Use raise from to specify exception cause
+    "C", # complexity
+    "COM818", # Trailing comma on bare tuple prohibited
+    "D", # docstrings
+    "DTZ003", # Use datetime.now(tz=) instead of datetime.utcnow()
+    "DTZ004", # Use datetime.fromtimestamp(ts, tz=) instead of datetime.utcfromtimestamp(ts)
+    "E", # pycodestyle
+    "F", # pyflakes/autoflake
+    "G", # flake8-logging-format
+    "I", # isort
+    "ICN001", # import concentions; {name} should be imported as {asname}
+    "N804", # First argument of a class method should be named cls
+    "N805", # First argument of a method should be named self
+    "N815", # Variable {name} in class scope should not be mixedCase
+    "PGH004", # Use specific rule codes when using noqa
+    "PLC0414", # Useless import alias. Import alias does not rename original package.
+    "PLC", # pylint
+    "PLE", # pylint
+    "PLR", # pylint
+    "PLW", # pylint
+    "Q000", # Double quotes found but single quotes preferred
+    "RUF006", # Store a reference to the return value of asyncio.create_task
+    "S102", # Use of exec detected
+    "S103", # bad-file-permissions
+    "S108", # hardcoded-temp-file
+    "S306", # suspicious-mktemp-usage
+    "S307", # suspicious-eval-usage
+    "S313", # suspicious-xmlc-element-tree-usage
+    "S314", # suspicious-xml-element-tree-usage
+    "S315", # suspicious-xml-expat-reader-usage
+    "S316", # suspicious-xml-expat-builder-usage
+    "S317", # suspicious-xml-sax-usage
+    "S318", # suspicious-xml-mini-dom-usage
+    "S319", # suspicious-xml-pull-dom-usage
+    "S320", # suspicious-xmle-tree-usage
+    "S601", # paramiko-call
+    "S602", # subprocess-popen-with-shell-equals-true
+    "S604", # call-with-shell-equals-true
+    "S608", # hardcoded-sql-expression
+    "S609", # unix-command-wildcard-injection
+    "SIM105", # Use contextlib.suppress({exception}) instead of try-except-pass
+    "SIM117", # Merge with-statements that use the same scope
+    "SIM118", # Use {key} in {dict} instead of {key} in {dict}.keys()
+    "SIM201", # Use {left} != {right} instead of not {left} == {right}
+    "SIM208", # Use {expr} instead of not (not {expr})
+    "SIM212", # Use {a} if {a} else {b} instead of {b} if not {a} else {a}
+    "SIM300", # Yoda conditions. Use 'age == 42' instead of '42 == age'.
+    "SIM401", # Use get from dict with default instead of an if block
+    "T100", # Trace found: {name} used
+    "T20", # flake8-print
+    "TID251", # Banned imports
+    "TRY004", # Prefer TypeError exception for invalid type
+    "TRY203", # Remove exception handler; error is immediately re-raised
+    "UP", # pyupgrade
+    "W", # pycodestyle
+]
+
+lint.ignore = [
+    "D202", # No blank lines allowed after function docstring
+    "D203", # 1 blank line required before class docstring
+    "D213", # Multi-line docstring summary should start at the second line
+    "D406", # Section name should end with a newline
+    "D407", # Section name underlining
+    "E501", # line too long
+    "E731", # do not assign a lambda expression, use a def
+
+    # Ignore ignored, as the rule is now back in preview/nursery, which cannot
+    # be ignored anymore without warnings.
+    # https://github.com/astral-sh/ruff/issues/7491
+    # "PLC1901", # Lots of false positives
+
+    # False positives https://github.com/astral-sh/ruff/issues/5386
+    "PLC0208", # Use a sequence type instead of a `set` when iterating over values
+    "PLR0911", # Too many return statements ({returns} > {max_returns})
+    "PLR0912", # Too many branches ({branches} > {max_branches})
+    "PLR0913", # Too many arguments to function call ({c_args} > {max_args})
+    "PLR0915", # Too many statements ({statements} > {max_statements})
+    "PLR2004", # Magic value used in comparison, consider replacing {value} with a constant variable
+    "PLW2901", # Outer {outer_kind} variable {name} overwritten by inner {inner_kind} target
+    "UP006", # keep type annotation style as is
+    "UP007", # keep type annotation style as is
+    # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923
+    "UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
+
+    # May conflict with the formatter, https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
+    "W191",
+    "E111",
+    "E114",
+    "E117",
+    "D206",
+    "D300",
+    "Q000",
+    "Q001",
+    "Q002",
+    "Q003",
+    "COM812",
+    "COM819",
+    "ISC001",
+    "ISC002",
+
+    # Disabled because ruff does not understand type of __all__ generated by a function
+    "PLE0605",
+]
+
+[tool.ruff.lint.flake8-import-conventions.extend-aliases]
+voluptuous = "vol"
+
+[tool.ruff.lint.flake8-pytest-style]
+fixture-parentheses = false
+
+[tool.ruff.lint.flake8-tidy-imports.banned-api]
+"pytz".msg = "use zoneinfo instead"
+
+[tool.ruff.lint.isort]
+force-sort-within-sections = true
+section-order = [
+    "future",
+    "standard-library",
+    "third-party",
+    "first-party",
+    "local-folder",
+]
+forced-separate = ["tests"]
+known-first-party = ["supervisor", "tests"]
+combine-as-imports = true
+split-on-trailing-comma = false
+
+[tool.ruff.lint.per-file-ignores]
+# DBus Service Mocks must use typing and names understood by dbus-fast
+"tests/dbus_service_mocks/*.py" = ["F722", "F821", "N815"]
+
+[tool.ruff.lint.mccabe]
+max-complexity = 25
requirements.txt
@@ -1,28 +1,29 @@
-aiodns==3.1.1
+aiodns==3.2.0
-aiohttp==3.9.1
+aiohttp==3.11.12
-aiohttp-fast-url-dispatcher==0.3.0
-async_timeout==4.0.3
 atomicwrites-homeassistant==1.4.1
-attrs==23.2.0
+attrs==25.1.0
-awesomeversion==23.11.0
+awesomeversion==24.6.0
 brotli==1.1.0
-ciso8601==2.3.1
+ciso8601==2.3.2
-colorlog==6.8.0
+colorlog==6.9.0
-cpe==1.2.1
+cpe==1.3.1
-cryptography==41.0.7
+cryptography==44.0.1
-debugpy==1.8.0
+debugpy==1.8.12
-deepmerge==1.1.1
+deepmerge==2.0
-dirhash==0.2.1
+dirhash==0.5.0
-docker==7.0.0
+docker==7.1.0
 faust-cchardet==2.1.19
-gitpython==3.1.41
+gitpython==3.1.44
-jinja2==3.1.3
+jinja2==3.1.5
-orjson==3.9.10
+orjson==3.10.12
-pulsectl==23.5.2
+pulsectl==24.12.0
-pyudev==0.24.1
+pyudev==0.24.3
-PyYAML==6.0.1
+PyYAML==6.0.2
-securetar==2023.12.0
-sentry-sdk==1.39.2
-voluptuous==0.14.1
-dbus-fast==2.21.0
-typing_extensions==4.9.0
+requests==2.32.3
+securetar==2025.2.0
+sentry-sdk==2.22.0
+setuptools==75.8.0
+voluptuous==0.15.2
+dbus-fast==2.33.0
+typing_extensions==4.12.2
+zlib-fast==0.2.0
requirements_tests.txt
@@ -1,16 +1,13 @@
-black==23.12.1
-coverage==7.4.0
-flake8-docstrings==1.7.0
-flake8==7.0.0
-pre-commit==3.6.0
-pydocstyle==6.3.0
-pylint==3.0.3
-pytest-aiohttp==1.0.5
-pytest-asyncio==0.23.3
-pytest-cov==4.1.0
-pytest-timeout==2.2.0
-pytest==7.4.4
-pyupgrade==3.15.0
-time-machine==2.13.0
-typing_extensions==4.9.0
-urllib3==2.1.0
+astroid==3.3.8
+coverage==7.6.12
+pre-commit==4.1.0
+pylint==3.3.4
+pytest-aiohttp==1.1.0
+pytest-asyncio==0.25.2
+pytest-cov==6.0.0
+pytest-timeout==2.3.1
+pytest==8.3.4
+ruff==0.9.6
+time-machine==2.16.0
+typing_extensions==4.12.2
+urllib3==2.3.0
@@ -1,30 +0,0 @@
-#!/bin/bash
-source "/etc/supervisor_scripts/common"
-
-set -e
-
-# Update frontend
-git submodule update --init --recursive --remote
-
-[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
-cd home-assistant-polymer
-nvm install
-script/bootstrap
-
-# Download translations
-start_docker
-./script/translations_download
-
-# build frontend
-cd hassio
-./script/build_hassio
-
-# Copy frontend
-rm -rf ../../supervisor/api/panel/*
-cp -rf build/* ../../supervisor/api/panel/
-
-# Reset frontend git
-cd ..
-git reset --hard HEAD
-
-stop_docker
17  setup.cfg
@@ -1,17 +0,0 @@
-[flake8]
-exclude = .venv,.git,.tox,docs,venv,bin,lib,deps,build
-doctests = True
-max-line-length = 88
-# E501: line too long
-# W503: Line break occurred before a binary operator
-# E203: Whitespace before ':'
-# D202 No blank lines allowed after function docstring
-# W504 line break after binary operator
-ignore =
-    E501,
-    W503,
-    E203,
-    D202,
-    W504
-per-file-ignores =
-    tests/dbus_service_mocks/*.py: F821,F722
3  setup.py
@@ -1,4 +1,5 @@
 """Home Assistant Supervisor setup."""
+
 from pathlib import Path
 import re

@@ -18,7 +19,7 @@ def _get_supervisor_version():
     for line in CONSTANTS.split("/n"):
         if match := RE_SUPERVISOR_VERSION.match(line):
             return match.group(1)
-    return "99.9.9dev"
+    return "9999.09.9.dev9999"


 setup(
supervisor/__main__.py
@@ -1,12 +1,20 @@
 """Main file for Supervisor."""
+
 import asyncio
 from concurrent.futures import ThreadPoolExecutor
 import logging
 from pathlib import Path
 import sys

-from supervisor import bootstrap
-from supervisor.utils.logging import activate_log_queue_handler
+import zlib_fast
+
+# Enable fast zlib before importing supervisor
+zlib_fast.enable()
+
+from supervisor import bootstrap  # pylint: disable=wrong-import-position # noqa: E402
+from supervisor.utils.logging import (  # pylint: disable=wrong-import-position # noqa: E402
+    activate_log_queue_handler,
+)

 _LOGGER: logging.Logger = logging.getLogger(__name__)
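The ordering above is the whole point of the change: zlib_fast.enable() patches the zlib module, so it has to run before anything that binds zlib functions at import time. A minimal stdlib-only sketch of why late patching misses early bindings:

import zlib

early_binding = zlib.compress   # like `from zlib import compress` at import time
original = zlib.compress

def fast_compress(data, /, level=-1):
    """Stand-in for an accelerated implementation."""
    return original(data, level)

zlib.compress = fast_compress              # patch after the early import happened
assert early_binding is not fast_compress  # the early binding never sees the patch
zlib.compress = original                   # restore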
supervisor/addons/addon.py
@@ -1,9 +1,12 @@
 """Init file for Supervisor add-ons."""
+
 import asyncio
 from collections.abc import Awaitable
 from contextlib import suppress
 from copy import deepcopy
+from datetime import datetime
 import errno
+from functools import partial
 from ipaddress import IPv4Address
 import logging
 from pathlib import Path, PurePath
@@ -15,11 +18,14 @@ from tempfile import TemporaryDirectory
 from typing import Any, Final

 import aiohttp
+from awesomeversion import AwesomeVersionCompareException
 from deepmerge import Merger
 from securetar import atomic_contents_add, secure_path
 import voluptuous as vol
 from voluptuous.humanize import humanize_error

+from supervisor.utils.dt import utc_from_timestamp
+
 from ..bus import EventListener
 from ..const import (
     ATTR_ACCESS_TOKEN,
@@ -42,13 +48,17 @@ from ..const import (
     ATTR_SLUG,
     ATTR_STATE,
     ATTR_SYSTEM,
+    ATTR_SYSTEM_MANAGED,
+    ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
     ATTR_TYPE,
     ATTR_USER,
     ATTR_UUID,
     ATTR_VERSION,
+    ATTR_VERSION_TIMESTAMP,
     ATTR_WATCHDOG,
     DNS_SUFFIX,
     AddonBoot,
+    AddonBootConfig,
     AddonStartup,
     AddonState,
     BusEvent,
@@ -72,7 +82,8 @@ from ..hardware.data import Device
 from ..homeassistant.const import WSEvent, WSType
 from ..jobs.const import JobExecutionLimit
 from ..jobs.decorator import Job
-from ..resolution.const import UnhealthyReason
+from ..resolution.const import ContextType, IssueType, UnhealthyReason
+from ..resolution.data import Issue
 from ..store.addon import AddonStore
 from ..utils import check_port
 from ..utils.apparmor import adjust_profile
@@ -135,11 +146,27 @@ class Addon(AddonModel):
         self._listeners: list[EventListener] = []
         self._startup_event = asyncio.Event()
         self._startup_task: asyncio.Task | None = None
+        self._boot_failed_issue = Issue(
+            IssueType.BOOT_FAIL, ContextType.ADDON, reference=self.slug
+        )
+        self._device_access_missing_issue = Issue(
+            IssueType.DEVICE_ACCESS_MISSING, ContextType.ADDON, reference=self.slug
+        )

     def __repr__(self) -> str:
         """Return internal representation."""
         return f"<Addon: {self.slug}>"

+    @property
+    def boot_failed_issue(self) -> Issue:
+        """Get issue used if start on boot failed."""
+        return self._boot_failed_issue
+
+    @property
+    def device_access_missing_issue(self) -> Issue:
+        """Get issue used if device access is missing and can't be automatically added."""
+        return self._device_access_missing_issue
+
     @property
     def state(self) -> AddonState:
         """Return state of the add-on."""
@@ -157,6 +184,20 @@ class Addon(AddonModel):
         if new_state == AddonState.STARTED or old_state == AddonState.STARTUP:
             self._startup_event.set()

+        # Dismiss boot failed issue if present and we started
+        if (
+            new_state == AddonState.STARTED
+            and self.boot_failed_issue in self.sys_resolution.issues
+        ):
+            self.sys_resolution.dismiss_issue(self.boot_failed_issue)
+
+        # Dismiss device access missing issue if present and we stopped
+        if (
+            new_state == AddonState.STOPPED
+            and self.device_access_missing_issue in self.sys_resolution.issues
+        ):
+            self.sys_resolution.dismiss_issue(self.device_access_missing_issue)
+
         self.sys_homeassistant.websocket.send_message(
             {
                 ATTR_TYPE: WSType.SUPERVISOR_EVENT,
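The state setter above pairs each failure issue with the state transition that proves recovery. A toy sketch of that dismissal pattern (simplified stand-ins, not the real supervisor Resolution API):

from dataclasses import dataclass, field

@dataclass(frozen=True)
class Issue:
    type: str
    reference: str

@dataclass
class Resolution:
    issues: set = field(default_factory=set)

    def dismiss_issue(self, issue: Issue) -> None:
        self.issues.discard(issue)

boot_failed = Issue("boot_fail", "core_ssh")
resolution = Resolution({boot_failed})

new_state = "started"  # the transition that proves recovery
if new_state == "started" and boot_failed in resolution.issues:
    resolution.dismiss_issue(boot_failed)

assert not resolution.issues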
@@ -175,6 +216,9 @@ class Addon(AddonModel):

     async def load(self) -> None:
         """Async initialize of object."""
+        if self.is_detached:
+            await super().refresh_path_cache()
+
         self._listeners.append(
             self.sys_bus.register_event(
                 BusEvent.DOCKER_CONTAINER_STATE_CHANGE, self.container_state_changed
@@ -187,9 +231,20 @@ class Addon(AddonModel):
         )

         await self._check_ingress_port()
-        with suppress(DockerError):
+        default_image = self._image(self.data)
+        try:
             await self.instance.attach(version=self.version)
+
+            # Ensure we are using correct image for this system
+            await self.instance.check_image(self.version, default_image, self.arch)
+        except DockerError:
+            _LOGGER.info("No %s addon Docker image %s found", self.slug, self.image)
+            with suppress(DockerError):
+                await self.instance.install(self.version, default_image, arch=self.arch)
+
+            self.persist[ATTR_IMAGE] = default_image
+            self.save_persist()

     @property
     def ip_address(self) -> IPv4Address:
         """Return IP of add-on instance."""
@@ -225,6 +280,34 @@ class Addon(AddonModel):
         """Return True if add-on is detached."""
         return self.slug not in self.sys_store.data.addons

+    @property
+    def with_icon(self) -> bool:
+        """Return True if an icon exists."""
+        if self.is_detached:
+            return super().with_icon
+        return self.addon_store.with_icon
+
+    @property
+    def with_logo(self) -> bool:
+        """Return True if a logo exists."""
+        if self.is_detached:
+            return super().with_logo
+        return self.addon_store.with_logo
+
+    @property
+    def with_changelog(self) -> bool:
+        """Return True if a changelog exists."""
+        if self.is_detached:
+            return super().with_changelog
+        return self.addon_store.with_changelog
+
+    @property
+    def with_documentation(self) -> bool:
+        """Return True if a documentation exists."""
+        if self.is_detached:
+            return super().with_documentation
+        return self.addon_store.with_documentation
+
     @property
     def available(self) -> bool:
         """Return True if this add-on is available on this platform."""
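The four properties follow one delegation rule: detached add-ons fall back to the base model's own filesystem check, installed ones reuse the store entry's cached flag. A minimal sketch of the pattern with hypothetical stub classes:

class AddonModel:
    @property
    def with_icon(self) -> bool:
        return False  # stand-in for an actual filesystem check

class StoreEntry(AddonModel):
    with_icon = True  # stand-in for the store's cached value

class Addon(AddonModel):
    def __init__(self, store: StoreEntry | None) -> None:
        self._store = store

    @property
    def is_detached(self) -> bool:
        return self._store is None

    @property
    def with_icon(self) -> bool:
        if self.is_detached:
            return super().with_icon  # fall back to the base check
        return self._store.with_icon  # reuse the cached store flag

assert Addon(StoreEntry()).with_icon is True
assert Addon(None).with_icon is False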
@@ -261,7 +344,9 @@ class Addon(AddonModel):

     @property
     def boot(self) -> AddonBoot:
-        """Return boot config with prio local settings."""
+        """Return boot config with prio local settings unless config is forced."""
+        if self.boot_config == AddonBootConfig.MANUAL_ONLY:
+            return super().boot
         return self.persist.get(ATTR_BOOT, super().boot)

     @boot.setter
@@ -269,6 +354,13 @@ class Addon(AddonModel):
         """Store user boot options."""
         self.persist[ATTR_BOOT] = value

+        # Dismiss boot failed issue if present and boot at start disabled
+        if (
+            value == AddonBoot.MANUAL
+            and self._boot_failed_issue in self.sys_resolution.issues
+        ):
+            self.sys_resolution.dismiss_issue(self._boot_failed_issue)
+
     @property
     def auto_update(self) -> bool:
         """Return if auto update is enable."""
@@ -279,6 +371,28 @@ class Addon(AddonModel):
         """Set auto update."""
         self.persist[ATTR_AUTO_UPDATE] = value

+    @property
+    def auto_update_available(self) -> bool:
+        """Return if it is safe to auto update addon."""
+        if not self.need_update or not self.auto_update:
+            return False
+
+        for version in self.breaking_versions:
+            try:
+                # Must update to latest so if true update crosses a breaking version
+                if self.version < version:
+                    return False
+            except AwesomeVersionCompareException:
+                # If version scheme changed, we may get compare exception
+                # If latest version >= breaking version then assume update will
+                # cross it as the version scheme changes
+                # If both versions have compare exception, ignore as its in the past
+                with suppress(AwesomeVersionCompareException):
+                    if self.latest_version >= version:
+                        return False
+
+        return True
+
     @property
     def watchdog(self) -> bool:
         """Return True if watchdog is enable."""
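auto_update_available only has to decide whether the jump from the installed version to the latest one crosses an entry in breaking_versions. A condensed, runnable sketch using the same awesomeversion package pinned in requirements.txt (version numbers are made up):

from awesomeversion import AwesomeVersion, AwesomeVersionCompareException

installed = AwesomeVersion("1.4.0")
latest = AwesomeVersion("2.1.0")
breaking_versions = [AwesomeVersion("2.0.0")]

def auto_update_available() -> bool:
    for breaking in breaking_versions:
        try:
            if installed < breaking:  # update would cross a breaking release
                return False
        except AwesomeVersionCompareException:
            # Version scheme changed; fall back to comparing the target version
            try:
                if latest >= breaking:
                    return False
            except AwesomeVersionCompareException:
                continue  # both incomparable: the breaking version is in the past
    return True

assert auto_update_available() is False  # 1.4.0 -> 2.1.0 crosses 2.0.0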
@@ -294,6 +408,37 @@ class Addon(AddonModel):
         else:
             self.persist[ATTR_WATCHDOG] = value

+    @property
+    def system_managed(self) -> bool:
+        """Return True if addon is managed by Home Assistant."""
+        return self.persist[ATTR_SYSTEM_MANAGED]
+
+    @system_managed.setter
+    def system_managed(self, value: bool) -> None:
+        """Set system managed enable/disable."""
+        if not value and self.system_managed_config_entry:
+            self.system_managed_config_entry = None
+
+        self.persist[ATTR_SYSTEM_MANAGED] = value
+
+    @property
+    def system_managed_config_entry(self) -> str | None:
+        """Return id of config entry managing this addon (if any)."""
+        if not self.system_managed:
+            return None
+        return self.persist.get(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY)
+
+    @system_managed_config_entry.setter
+    def system_managed_config_entry(self, value: str | None) -> None:
+        """Set ID of config entry managing this addon."""
+        if not self.system_managed:
+            _LOGGER.warning(
+                "Ignoring system managed config entry for %s because it is not system managed",
+                self.slug,
+            )
+        else:
+            self.persist[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY] = value
+
     @property
     def uuid(self) -> str:
         """Return an API token for this add-on."""
@@ -321,6 +466,11 @@ class Addon(AddonModel):
         """Return version of add-on."""
         return self.data_store[ATTR_VERSION]

+    @property
+    def latest_version_timestamp(self) -> datetime:
+        """Return when latest version was first seen."""
+        return utc_from_timestamp(self.data_store[ATTR_VERSION_TIMESTAMP])
+
     @property
     def protected(self) -> bool:
         """Return if add-on is in protected mode."""
@@ -655,10 +805,12 @@ class Addon(AddonModel):
         limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
     )
-    async def uninstall(self) -> None:
+    async def uninstall(
+        self, *, remove_config: bool, remove_image: bool = True
+    ) -> None:
         """Uninstall and cleanup this addon."""
         try:
-            await self.instance.remove()
+            await self.instance.remove(remove_image=remove_image)
         except DockerError as err:
             raise AddonsError() from err

@@ -666,6 +818,10 @@ class Addon(AddonModel):

         await self.unload()

+        # Remove config if present and requested
+        if self.addon_config_used and remove_config:
+            await remove_data(self.path_config)
+
         # Cleanup audio settings
         if self.path_pulse.exists():
             with suppress(OSError):
@@ -770,6 +926,7 @@ class Addon(AddonModel):
                 raise AddonsError() from err

             self.sys_addons.data.update(self.addon_store)
+            await self._check_ingress_port()
             _LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)

         finally:
@@ -1051,6 +1208,25 @@ class Addon(AddonModel):
             await self._backup_command(self.backup_post)
         return None

+    def _is_excluded_by_filter(
+        self, origin_path: Path, arcname: str, item_arcpath: PurePath
+    ) -> bool:
+        """Filter out files from backup based on filters provided by addon developer.
+
+        This tests the dev provided filters against the full path of the file as
+        Supervisor sees them using match. This is done for legacy reasons, testing
+        against the relative path makes more sense and may be changed in the future.
+        """
+        full_path = origin_path / item_arcpath.relative_to(arcname)
+
+        for exclude in self.backup_exclude:
+            if not full_path.match(exclude):
+                continue
+            _LOGGER.debug("Ignoring %s because of %s", full_path, exclude)
+            return True
+
+        return False
+
     @Job(
         name="addon_backup",
         limit=JobExecutionLimit.GROUP_ONCE,
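_is_excluded_by_filter keeps the legacy semantics of matching developer-supplied globs against the reconstructed full path. A self-contained sketch with illustrative paths and patterns:

from pathlib import Path, PurePath

backup_exclude = ["*.db-shm", "cache/*"]

def is_excluded(origin_path: Path, arcname: str, item_arcpath: PurePath) -> bool:
    # Rebuild the full path as the Supervisor sees it, then test each glob
    full_path = origin_path / item_arcpath.relative_to(arcname)
    return any(full_path.match(pattern) for pattern in backup_exclude)

assert is_excluded(Path("/data"), "data", PurePath("data/home.db-shm"))
assert not is_excluded(Path("/data"), "data", PurePath("data/home.db"))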
@@ -1062,46 +1238,45 @@ class Addon(AddonModel):
         Returns a Task that completes when addon has state 'started' (see start)
         for cold backup. Else nothing is returned.
         """
-        wait_for_start: Awaitable[None] | None = None
-
-        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
-            temp_path = Path(temp)
-
-            # store local image
-            if self.need_build:
-                try:
-                    await self.instance.export_image(temp_path.joinpath("image.tar"))
-                except DockerError as err:
-                    raise AddonsError() from err
-
-            data = {
-                ATTR_USER: self.persist,
-                ATTR_SYSTEM: self.data,
-                ATTR_VERSION: self.version,
-                ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
-            }
-
-            # Store local configs/state
-            try:
-                write_json_file(temp_path.joinpath("addon.json"), data)
-            except ConfigurationFileError as err:
-                raise AddonsError(
-                    f"Can't save meta for {self.slug}", _LOGGER.error
-                ) from err
-
-            # Store AppArmor Profile
-            if self.sys_host.apparmor.exists(self.slug):
-                profile = temp_path.joinpath("apparmor.txt")
-                try:
-                    await self.sys_host.apparmor.backup_profile(self.slug, profile)
-                except HostAppArmorError as err:
-                    raise AddonsError(
-                        "Can't backup AppArmor profile", _LOGGER.error
-                    ) from err
-
-            # write into tarfile
-            def _write_tarfile():
-                """Write tar inside loop."""
+        def _addon_backup(
+            store_image: bool,
+            metadata: dict[str, Any],
+            apparmor_profile: str | None,
+            addon_config_used: bool,
+        ):
+            """Start the backup process."""
+            with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
+                temp_path = Path(temp)
+
+                # store local image
+                if store_image:
+                    try:
+                        self.instance.export_image(temp_path.joinpath("image.tar"))
+                    except DockerError as err:
+                        raise AddonsError() from err
+
+                # Store local configs/state
+                try:
+                    write_json_file(temp_path.joinpath("addon.json"), metadata)
+                except ConfigurationFileError as err:
+                    raise AddonsError(
+                        f"Can't save meta for {self.slug}", _LOGGER.error
+                    ) from err
+
+                # Store AppArmor Profile
+                if apparmor_profile:
+                    profile_backup_file = temp_path.joinpath("apparmor.txt")
+                    try:
+                        self.sys_host.apparmor.backup_profile(
+                            apparmor_profile, profile_backup_file
+                        )
+                    except HostAppArmorError as err:
+                        raise AddonsError(
+                            "Can't backup AppArmor profile", _LOGGER.error
+                        ) from err
+
+                # Write tarfile
                 with tar_file as backup:
                     # Backup metadata
                     backup.add(temp, arcname=".")
@@ -1110,32 +1285,56 @@ class Addon(AddonModel):
                 atomic_contents_add(
                     backup,
                     self.path_data,
-                    excludes=self.backup_exclude,
+                    file_filter=partial(
+                        self._is_excluded_by_filter, self.path_data, "data"
+                    ),
                     arcname="data",
                 )

                 # Backup config
-                if self.addon_config_used:
+                if addon_config_used:
                     atomic_contents_add(
                         backup,
                         self.path_config,
-                        excludes=self.backup_exclude,
+                        file_filter=partial(
+                            self._is_excluded_by_filter, self.path_config, "config"
+                        ),
                         arcname="config",
                     )

-        is_running = await self.begin_backup()
-        try:
-            _LOGGER.info("Building backup for add-on %s", self.slug)
-            await self.sys_run_in_executor(_write_tarfile)
-        except (tarfile.TarError, OSError) as err:
-            raise AddonsError(
-                f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
-            ) from err
-        finally:
-            if is_running:
-                wait_for_start = await self.end_backup()
+        wait_for_start: Awaitable[None] | None = None

-        _LOGGER.info("Finish backup for addon %s", self.slug)
+        data = {
+            ATTR_USER: self.persist,
+            ATTR_SYSTEM: self.data,
+            ATTR_VERSION: self.version,
+            ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
+        }
+        apparmor_profile = (
+            self.slug if self.sys_host.apparmor.exists(self.slug) else None
+        )
+
+        was_running = await self.begin_backup()
+        try:
+            _LOGGER.info("Building backup for add-on %s", self.slug)
+            await self.sys_run_in_executor(
+                partial(
+                    _addon_backup,
+                    store_image=self.need_build,
+                    metadata=data,
+                    apparmor_profile=apparmor_profile,
+                    addon_config_used=self.addon_config_used,
+                )
+            )
+            _LOGGER.info("Finish backup for addon %s", self.slug)
+        except (tarfile.TarError, OSError) as err:
+            raise AddonsError(
+                f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
+            ) from err
+        finally:
+            if was_running:
+                wait_for_start = await self.end_backup()
+
         return wait_for_start

     @Job(
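The refactor moves all blocking tar and JSON work into one synchronous helper and ships it to the executor. Because run_in_executor does not forward keyword arguments, they are frozen into a functools.partial first; a minimal sketch of that shape:

import asyncio
from functools import partial

def build_archive(*, store_image: bool, metadata: dict) -> str:
    """Stand-in for the blocking tar/JSON work."""
    return f"archived {len(metadata)} keys (image={store_image})"

async def main() -> None:
    loop = asyncio.get_running_loop()
    # Keyword arguments are bound into the partial so the executor only
    # ever sees a zero-argument callable.
    result = await loop.run_in_executor(
        None, partial(build_archive, store_image=False, metadata={"version": "1.0"})
    )
    print(result)

asyncio.run(main())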
@@ -1150,30 +1349,36 @@ class Addon(AddonModel):
         if addon is started after restore. Else nothing is returned.
         """
         wait_for_start: Awaitable[None] | None = None
-        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
-            # extract backup
-            def _extract_tarfile():
-                """Extract tar backup."""
+
+        # Extract backup
+        def _extract_tarfile() -> tuple[TemporaryDirectory, dict[str, Any]]:
+            """Extract tar backup."""
+            tmp = TemporaryDirectory(dir=self.sys_config.path_tmp)
+            try:
                 with tar_file as backup:
                     backup.extractall(
-                        path=Path(temp),
+                        path=tmp.name,
                         members=secure_path(backup),
                         filter="fully_trusted",
                     )

-            try:
-                await self.sys_run_in_executor(_extract_tarfile)
-            except tarfile.TarError as err:
-                raise AddonsError(
-                    f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
-                ) from err
-
-            # Read backup data
-            try:
-                data = read_json_file(Path(temp, "addon.json"))
-            except ConfigurationFileError as err:
-                raise AddonsError() from err
+                data = read_json_file(Path(tmp.name, "addon.json"))
+            except:
+                tmp.cleanup()
+                raise
+
+            return tmp, data
+
+        try:
+            tmp, data = await self.sys_run_in_executor(_extract_tarfile)
+        except tarfile.TarError as err:
+            raise AddonsError(
+                f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
+            ) from err
+        except ConfigurationFileError as err:
+            raise AddonsError() from err

+        try:
             # Validate
             try:
                 data = SCHEMA_ADDON_BACKUP(data)
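_extract_tarfile now owns its TemporaryDirectory: it cleans up on any failure inside the helper and otherwise hands the directory to the caller, whose finally block releases it. A reduced sketch with a toy payload instead of a tar archive:

import json
from pathlib import Path
from tempfile import TemporaryDirectory

def extract() -> tuple[TemporaryDirectory, dict]:
    tmp = TemporaryDirectory()
    try:
        meta_file = Path(tmp.name, "addon.json")
        meta_file.write_text(json.dumps({"state": "started"}))
        return tmp, json.loads(meta_file.read_text())
    except:  # noqa: E722 - clean up before re-raising, as in the diff above
        tmp.cleanup()
        raise

tmp, data = extract()
try:
    assert data["state"] == "started"
finally:
    tmp.cleanup()  # caller owns the directory once extract() returns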
@@ -1207,7 +1412,7 @@ class Addon(AddonModel):
             if not await self.instance.exists():
                 _LOGGER.info("Restore/Install of image for addon %s", self.slug)

-                image_file = Path(temp, "image.tar")
+                image_file = Path(tmp.name, "image.tar")
                 if image_file.is_file():
                     with suppress(DockerError):
                         await self.instance.import_image(image_file)
@@ -1221,18 +1426,18 @@ class Addon(AddonModel):
                 _LOGGER.info("Restore/Update of image for addon %s", self.slug)
                 with suppress(DockerError):
                     await self.instance.update(version, restore_image, self.arch)
-                self._check_ingress_port()
+                await self._check_ingress_port()

             # Restore data and config
             def _restore_data():
                 """Restore data and config."""
-                temp_data = Path(temp, "data")
+                temp_data = Path(tmp.name, "data")
                 if temp_data.is_dir():
                     shutil.copytree(temp_data, self.path_data, symlinks=True)
                 else:
                     self.path_data.mkdir()

-                temp_config = Path(temp, "config")
+                temp_config = Path(tmp.name, "config")
                 if temp_config.is_dir():
                     shutil.copytree(temp_config, self.path_config, symlinks=True)
                 elif self.addon_config_used:
@@ -1252,7 +1457,7 @@ class Addon(AddonModel):
                 ) from err

             # Restore AppArmor
-            profile_file = Path(temp, "apparmor.txt")
+            profile_file = Path(tmp.name, "apparmor.txt")
             if profile_file.exists():
                 try:
                     await self.sys_host.apparmor.load_profile(
@@ -1260,19 +1465,21 @@ class Addon(AddonModel):
                     )
                 except HostAppArmorError as err:
                     _LOGGER.error(
-                        "Can't restore AppArmor profile for add-on %s", self.slug
+                        "Can't restore AppArmor profile for add-on %s",
+                        self.slug,
                     )
                     raise AddonsError() from err

+        finally:
             # Is add-on loaded
             if not self.loaded:
                 await self.load()

-        finally:
             # Run add-on
             if data[ATTR_STATE] == AddonState.STARTED:
                 wait_for_start = await self.start()
+        finally:
+            tmp.cleanup()

         _LOGGER.info("Finished restore for add-on %s", self.slug)
         return wait_for_start
@@ -1362,3 +1569,9 @@ class Addon(AddonModel):
                 ContainerState.UNHEALTHY,
             ]:
                 await self._restart_after_problem(event.state)
+
+    def refresh_path_cache(self) -> Awaitable[None]:
+        """Refresh cache of existing paths."""
+        if self.is_detached:
+            return super().refresh_path_cache()
+        return self.addon_store.refresh_path_cache()
supervisor/addons/build.py
@@ -1,4 +1,5 @@
 """Supervisor add-on build environment."""
+
 from __future__ import annotations

 from functools import cached_property
@@ -102,11 +103,11 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         except HassioArchNotFound:
             return False

-    def get_docker_args(self, version: AwesomeVersion):
+    def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
         """Create a dict with Docker build arguments."""
         args = {
             "path": str(self.addon.path_location),
-            "tag": f"{self.addon.image}:{version!s}",
+            "tag": f"{image or self.addon.image}:{version!s}",
             "dockerfile": str(self.dockerfile),
             "pull": True,
             "forcerm": not self.sys_dev,
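The new optional image parameter only changes the tag computation: callers can tag a build differently from the add-on's default image. An illustrative call shape with a stubbed class (names are from the diff, the surroundings are invented):

class FakeBuild:
    """Stubbed stand-in for AddonBuild; only the tag logic is real."""

    image = "ghcr.io/example/aarch64-addon-example"

    def get_docker_args(self, version, image=None):
        return {"tag": f"{image or self.image}:{version}"}

build = FakeBuild()
assert build.get_docker_args("1.2.0")["tag"] == "ghcr.io/example/aarch64-addon-example:1.2.0"
assert build.get_docker_args("1.2.0", image="localhost/test")["tag"] == "localhost/test:1.2.0"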
supervisor/addons/const.py
@@ -1,4 +1,5 @@
 """Add-on static data."""
+
 from datetime import timedelta
 from enum import StrEnum

@@ -28,6 +29,7 @@ class MappingType(StrEnum):


 ATTR_BACKUP = "backup"
+ATTR_BREAKING_VERSIONS = "breaking_versions"
 ATTR_CODENOTARY = "codenotary"
 ATTR_READ_ONLY = "read_only"
 ATTR_PATH = "path"

supervisor/addons/data.py
@@ -1,4 +1,5 @@
 """Init file for Supervisor add-on data."""
+
 from copy import deepcopy
 from typing import Any

supervisor/addons/manager.py
@@ -1,4 +1,5 @@
 """Supervisor add-on manager."""
+
 import asyncio
 from collections.abc import Awaitable
 from contextlib import suppress
@@ -6,24 +7,22 @@ import logging
 import tarfile
 from typing import Union

+from attr import evolve
+
 from ..const import AddonBoot, AddonStartup, AddonState
 from ..coresys import CoreSys, CoreSysAttributes
 from ..exceptions import (
-    AddonConfigurationError,
     AddonsError,
     AddonsJobError,
     AddonsNotSupportedError,
     CoreDNSError,
-    DockerAPIError,
     DockerError,
-    DockerNotFound,
     HassioError,
     HomeAssistantAPIError,
 )
 from ..jobs.decorator import Job, JobCondition
 from ..resolution.const import ContextType, IssueType, SuggestionType
 from ..store.addon import AddonStore
-from ..utils import check_exception_chain
 from ..utils.sentry import capture_exception
 from .addon import Addon
 from .const import ADDON_UPDATE_CONDITIONS
@@ -77,15 +76,20 @@ class AddonManager(CoreSysAttributes):
|
|||||||
|
|
||||||
async def load(self) -> None:
|
async def load(self) -> None:
|
||||||
"""Start up add-on management."""
|
"""Start up add-on management."""
|
||||||
tasks = []
|
# Refresh cache for all store addons
|
||||||
|
tasks: list[Awaitable[None]] = [
|
||||||
|
store.refresh_path_cache() for store in self.store.values()
|
||||||
|
]
|
||||||
|
|
||||||
|
# Load all installed addons
|
||||||
for slug in self.data.system:
|
for slug in self.data.system:
|
||||||
addon = self.local[slug] = Addon(self.coresys, slug)
|
addon = self.local[slug] = Addon(self.coresys, slug)
|
||||||
tasks.append(self.sys_create_task(addon.load()))
|
tasks.append(addon.load())
|
||||||
|
|
||||||
# Run initial tasks
|
# Run initial tasks
|
||||||
_LOGGER.info("Found %d installed add-ons", len(tasks))
|
_LOGGER.info("Found %d installed add-ons", len(self.data.system))
|
||||||
if tasks:
|
if tasks:
|
||||||
await asyncio.wait(tasks)
|
await asyncio.gather(*tasks)
|
||||||
|
|
||||||
# Sync DNS
|
# Sync DNS
|
||||||
await self.sync_dns()
|
await self.sync_dns()
|
||||||
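Note on the load() hunk above: asyncio.gather() accepts bare coroutines and schedules them itself, which is why the sys_create_task() wrapper could be dropped, and why the add-on count now comes from self.data.system rather than from a task list that also contains store cache refreshes. A minimal, self-contained sketch of the pattern (names are illustrative, not Supervisor APIs):

import asyncio

async def demo() -> None:
    async def load_one(n: int) -> None:
        # stand-in for addon.load() or store.refresh_path_cache()
        await asyncio.sleep(0)

    jobs = [load_one(n) for n in range(3)]  # bare coroutines, no tasks yet
    await asyncio.gather(*jobs)  # gather wraps and awaits them together

asyncio.run(demo())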
@@ -112,15 +116,14 @@ class AddonManager(CoreSysAttributes):
             try:
                 if start_task := await addon.start():
                     wait_boot.append(start_task)
-            except AddonsError as err:
-                # Check if there is an system/user issue
-                if check_exception_chain(
-                    err, (DockerAPIError, DockerNotFound, AddonConfigurationError)
-                ):
-                    addon.boot = AddonBoot.MANUAL
-                    addon.save_persist()
             except HassioError:
-                pass  # These are already handled
+                self.sys_resolution.add_issue(
+                    evolve(addon.boot_failed_issue),
+                    suggestions=[
+                        SuggestionType.EXECUTE_START,
+                        SuggestionType.DISABLE_BOOT,
+                    ],
+                )
             else:
                 continue

@@ -129,6 +132,19 @@ class AddonManager(CoreSysAttributes):
         # Ignore exceptions from waiting for addon startup, addon errors handled elsewhere
         await asyncio.gather(*wait_boot, return_exceptions=True)

+        # After waiting for startup, create an issue for boot addons that are error or unknown state
+        # Ignore stopped as single shot addons can be run at boot and this is successful exit
+        # Timeout waiting for startup is not a failure, addon is probably just slow
+        for addon in tasks:
+            if addon.state in {AddonState.ERROR, AddonState.UNKNOWN}:
+                self.sys_resolution.add_issue(
+                    evolve(addon.boot_failed_issue),
+                    suggestions=[
+                        SuggestionType.EXECUTE_START,
+                        SuggestionType.DISABLE_BOOT,
+                    ],
+                )
+
     async def shutdown(self, stage: AddonStartup) -> None:
         """Shutdown addons."""
         tasks: list[Addon] = []
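The new except branch relies on attr.evolve(): the add-on's boot_failed_issue is a frozen attrs template, and evolve() hands the resolution center a fresh copy instead of the shared instance. A sketch with a stand-in class (not Supervisor's real Issue type):

from attr import attrs, attrib, evolve

@attrs(frozen=True)
class Issue:  # illustrative stand-in
    type = attrib()
    reference = attrib(default=None)

template = Issue(type="boot_fail", reference="example_addon")
copy = evolve(template)  # fresh, equal copy; template stays untouched
assert copy == template and copy is not template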
@@ -173,13 +189,21 @@ class AddonManager(CoreSysAttributes):

         _LOGGER.info("Add-on '%s' successfully installed", slug)

-    async def uninstall(self, slug: str) -> None:
+    async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
         """Remove an add-on."""
         if slug not in self.local:
             _LOGGER.warning("Add-on %s is not installed", slug)
             return

-        await self.local[slug].uninstall()
+        shared_image = any(
+            self.local[slug].image == addon.image
+            and self.local[slug].version == addon.version
+            for addon in self.installed
+            if addon.slug != slug
+        )
+        await self.local[slug].uninstall(
+            remove_config=remove_config, remove_image=not shared_image
+        )

         _LOGGER.info("Add-on '%s' successfully removed", slug)

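The uninstall() change keeps the Docker image alive while another installed add-on still references the same image and version. A self-contained sketch of that guard, using plain dicts in place of Addon objects:

def should_remove_image(installed: list[dict], slug: str) -> bool:
    # Remove the image only if no other add-on shares image and version
    me = next(a for a in installed if a["slug"] == slug)
    shared = any(
        a["image"] == me["image"] and a["version"] == me["version"]
        for a in installed
        if a["slug"] != slug
    )
    return not shared

installed = [
    {"slug": "a", "image": "ghcr.io/example", "version": "1.0"},
    {"slug": "b", "image": "ghcr.io/example", "version": "1.0"},
]
assert should_remove_image(installed, "a") is False  # "b" still needs it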
@@ -1,14 +1,18 @@
 """Init file for Supervisor add-ons."""
+
 from abc import ABC, abstractmethod
 from collections import defaultdict
-from collections.abc import Callable
+from collections.abc import Awaitable, Callable
 from contextlib import suppress
+from datetime import datetime
 import logging
 from pathlib import Path
 from typing import Any

 from awesomeversion import AwesomeVersion, AwesomeVersionException

+from supervisor.utils.dt import utc_from_timestamp
+
 from ..const import (
     ATTR_ADVANCED,
     ATTR_APPARMOR,
@@ -43,7 +47,7 @@ from ..const import (
     ATTR_JOURNALD,
     ATTR_KERNEL_MODULES,
     ATTR_LEGACY,
-    ATTR_LOCATON,
+    ATTR_LOCATION,
     ATTR_MACHINE,
     ATTR_MAP,
     ATTR_NAME,
@@ -71,6 +75,7 @@ from ..const import (
     ATTR_URL,
     ATTR_USB,
     ATTR_VERSION,
+    ATTR_VERSION_TIMESTAMP,
     ATTR_VIDEO,
     ATTR_WATCHDOG,
     ATTR_WEBUI,
@@ -78,6 +83,7 @@ from ..const import (
     SECURITY_DISABLE,
     SECURITY_PROFILE,
     AddonBoot,
+    AddonBootConfig,
     AddonStage,
     AddonStartup,
 )
@@ -90,6 +96,7 @@ from ..utils import version_is_new_enough
 from .configuration import FolderMapping
 from .const import (
     ATTR_BACKUP,
+    ATTR_BREAKING_VERSIONS,
     ATTR_CODENOTARY,
     ATTR_PATH,
     ATTR_READ_ONLY,
@@ -113,6 +120,10 @@ class AddonModel(JobGroup, ABC):
             coresys, JOB_GROUP_ADDON.format_map(defaultdict(str, slug=slug)), slug
         )
         self.slug: str = slug
+        self._path_icon_exists: bool = False
+        self._path_logo_exists: bool = False
+        self._path_changelog_exists: bool = False
+        self._path_documentation_exists: bool = False

     @property
     @abstractmethod
@@ -140,10 +151,15 @@ class AddonModel(JobGroup, ABC):
         return self.data[ATTR_OPTIONS]

     @property
-    def boot(self) -> AddonBoot:
-        """Return boot config with prio local settings."""
+    def boot_config(self) -> AddonBootConfig:
+        """Return boot config."""
         return self.data[ATTR_BOOT]

+    @property
+    def boot(self) -> AddonBoot:
+        """Return boot config with prio local settings unless config is forced."""
+        return AddonBoot(self.data[ATTR_BOOT])
+
     @property
     def auto_update(self) -> bool | None:
         """Return if auto update is enable."""
@@ -221,6 +237,11 @@ class AddonModel(JobGroup, ABC):
         """Return latest version of add-on."""
         return self.data[ATTR_VERSION]

+    @property
+    def latest_version_timestamp(self) -> datetime:
+        """Return when latest version was first seen."""
+        return utc_from_timestamp(self.data[ATTR_VERSION_TIMESTAMP])
+
     @property
     def version(self) -> AwesomeVersion:
         """Return version of add-on."""
@@ -501,22 +522,22 @@ class AddonModel(JobGroup, ABC):
     @property
     def with_icon(self) -> bool:
         """Return True if an icon exists."""
-        return self.path_icon.exists()
+        return self._path_icon_exists

     @property
     def with_logo(self) -> bool:
         """Return True if a logo exists."""
-        return self.path_logo.exists()
+        return self._path_logo_exists

     @property
     def with_changelog(self) -> bool:
         """Return True if a changelog exists."""
-        return self.path_changelog.exists()
+        return self._path_changelog_exists

     @property
     def with_documentation(self) -> bool:
         """Return True if a documentation exists."""
-        return self.path_documentation.exists()
+        return self._path_documentation_exists

     @property
     def supported_arch(self) -> list[str]:
@@ -560,7 +581,7 @@ class AddonModel(JobGroup, ABC):
     @property
     def path_location(self) -> Path:
         """Return path to this add-on."""
-        return Path(self.data[ATTR_LOCATON])
+        return Path(self.data[ATTR_LOCATION])

     @property
     def path_icon(self) -> Path:
@@ -620,6 +641,22 @@ class AddonModel(JobGroup, ABC):
         """Return Signer email address for CAS."""
         return self.data.get(ATTR_CODENOTARY)

+    @property
+    def breaking_versions(self) -> list[AwesomeVersion]:
+        """Return breaking versions of addon."""
+        return self.data[ATTR_BREAKING_VERSIONS]
+
+    def refresh_path_cache(self) -> Awaitable[None]:
+        """Refresh cache of existing paths."""
+
+        def check_paths():
+            self._path_icon_exists = self.path_icon.exists()
+            self._path_logo_exists = self.path_logo.exists()
+            self._path_changelog_exists = self.path_changelog.exists()
+            self._path_documentation_exists = self.path_documentation.exists()
+
+        return self.sys_run_in_executor(check_paths)
+
     def validate_availability(self) -> None:
         """Validate if addon is available for current system."""
         return self._validate_availability(self.data, logger=_LOGGER.error)
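The with_* properties above now return cached booleans, and refresh_path_cache() performs the blocking Path.exists() calls in an executor thread, so API handlers never touch the filesystem on the event loop. A minimal sketch of the same pattern (class and names are illustrative):

import asyncio
from pathlib import Path

class Model:
    """Stand-in for AddonModel; caches one path check."""

    def __init__(self, base: Path) -> None:
        self.path_icon = base / "icon.png"
        self._path_icon_exists = False

    async def refresh_path_cache(self) -> None:
        def check_paths() -> None:
            # Blocking filesystem I/O runs in a worker thread
            self._path_icon_exists = self.path_icon.exists()

        await asyncio.get_running_loop().run_in_executor(None, check_paths)

    @property
    def with_icon(self) -> bool:
        return self._path_icon_exists  # cached; no disk hit per API request

async def main() -> None:
    model = Model(Path("/tmp"))
    await model.refresh_path_cache()
    print(model.with_icon)

asyncio.run(main())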
@@ -1,4 +1,5 @@
 """Add-on Options / UI rendering."""
+
 import hashlib
 import logging
 from pathlib import Path
@@ -1,4 +1,5 @@
 """Util add-ons functions."""
+
 from __future__ import annotations

 import asyncio
@@ -45,6 +46,7 @@ def rating_security(addon: AddonModel) -> int:
         privilege in addon.privileged
         for privilege in (
            Capabilities.BPF,
+            Capabilities.CHECKPOINT_RESTORE,
            Capabilities.DAC_READ_SEARCH,
            Capabilities.NET_ADMIN,
            Capabilities.NET_RAW,
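For context on the hunk above, rating_security() lowers an add-on's score when it requests any capability from this high-risk set, which now also covers CHECKPOINT_RESTORE. A reduced sketch of the check (enum subset is illustrative, not the full Capabilities enum):

from enum import StrEnum

class Capabilities(StrEnum):  # illustrative subset
    BPF = "BPF"
    CHECKPOINT_RESTORE = "CHECKPOINT_RESTORE"
    NET_ADMIN = "NET_ADMIN"

def security_penalty(privileged: set[Capabilities]) -> int:
    high_risk = {Capabilities.BPF, Capabilities.CHECKPOINT_RESTORE, Capabilities.NET_ADMIN}
    return 1 if high_risk & privileged else 0

assert security_penalty({Capabilities.CHECKPOINT_RESTORE}) == 1
assert security_penalty(set()) == 0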
@@ -1,4 +1,5 @@
 """Validate add-ons options schema."""
+
 import logging
 import re
 import secrets
@@ -54,7 +55,7 @@ from ..const import (
     ATTR_KERNEL_MODULES,
     ATTR_LABELS,
     ATTR_LEGACY,
-    ATTR_LOCATON,
+    ATTR_LOCATION,
     ATTR_MACHINE,
     ATTR_MAP,
     ATTR_NAME,
@@ -78,6 +79,8 @@ from ..const import (
     ATTR_STATE,
     ATTR_STDIN,
     ATTR_SYSTEM,
+    ATTR_SYSTEM_MANAGED,
+    ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
     ATTR_TIMEOUT,
     ATTR_TMPFS,
     ATTR_TRANSLATIONS,
@@ -95,11 +98,11 @@ from ..const import (
     ROLE_ALL,
     ROLE_DEFAULT,
     AddonBoot,
+    AddonBootConfig,
     AddonStage,
     AddonStartup,
     AddonState,
 )
-from ..discovery.validate import valid_discovery_service
 from ..docker.const import Capabilities
 from ..validate import (
     docker_image,
@@ -112,6 +115,7 @@ from ..validate import (
 )
 from .const import (
     ATTR_BACKUP,
+    ATTR_BREAKING_VERSIONS,
     ATTR_CODENOTARY,
     ATTR_PATH,
     ATTR_READ_ONLY,
@@ -189,20 +193,6 @@ def _warn_addon_config(config: dict[str, Any]):
             name,
         )

-    invalid_services: list[str] = []
-    for service in config.get(ATTR_DISCOVERY, []):
-        try:
-            valid_discovery_service(service)
-        except vol.Invalid:
-            invalid_services.append(service)
-
-    if invalid_services:
-        _LOGGER.warning(
-            "Add-on lists the following unknown services for discovery: %s. Please report this to the maintainer of %s",
-            ", ".join(invalid_services),
-            name,
-        )
-
     return config


@@ -332,7 +322,9 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
         vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
             AddonStartup
         ),
-        vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
+        vol.Optional(ATTR_BOOT, default=AddonBootConfig.AUTO): vol.Coerce(
+            AddonBootConfig
+        ),
         vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
         vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
         vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
@@ -422,6 +414,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
             vol.Coerce(int), vol.Range(min=10, max=300)
         ),
         vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
+        vol.Optional(ATTR_BREAKING_VERSIONS, default=list): [version_tag],
     },
     extra=vol.REMOVE_EXTRA,
 )
@@ -480,6 +473,8 @@ SCHEMA_ADDON_USER = vol.Schema(
         vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
         vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
         vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
+        vol.Optional(ATTR_SYSTEM_MANAGED, default=False): vol.Boolean(),
+        vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY, default=None): vol.Maybe(str),
     },
     extra=vol.REMOVE_EXTRA,
 )
@@ -488,7 +483,7 @@ SCHEMA_ADDON_SYSTEM = vol.All(
     _migrate_addon_config(),
     _SCHEMA_ADDON_CONFIG.extend(
         {
-            vol.Required(ATTR_LOCATON): str,
+            vol.Required(ATTR_LOCATION): str,
             vol.Required(ATTR_REPOSITORY): str,
             vol.Required(ATTR_TRANSLATIONS, default=dict): {
                 str: SCHEMA_ADDON_TRANSLATIONS
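On the new breaking_versions option: default=list passes a callable, so every validation produces a fresh empty list instead of one shared mutable default. A small demonstration of the same voluptuous idiom, with str standing in for Supervisor's version_tag validator:

import voluptuous as vol

schema = vol.Schema({vol.Optional("breaking_versions", default=list): [str]})

assert schema({}) == {"breaking_versions": []}
assert schema({"breaking_versions": ["2024.1"]}) == {"breaking_versions": ["2024.1"]}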
@@ -1,20 +1,22 @@
 """Init file for Supervisor RESTful API."""
+
 from functools import partial
 import logging
 from pathlib import Path
 from typing import Any

 from aiohttp import web
-from aiohttp_fast_url_dispatcher import FastUrlDispatcher, attach_fast_url_dispatcher

 from ..const import AddonState
 from ..coresys import CoreSys, CoreSysAttributes
-from ..exceptions import APIAddonNotInstalled
+from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
+from ..utils.sentry import capture_exception
 from .addons import APIAddons
 from .audio import APIAudio
 from .auth import APIAuth
 from .backups import APIBackups
 from .cli import APICli
+from .const import CONTENT_TYPE_TEXT
 from .discovery import APIDiscovery
 from .dns import APICoreDNS
 from .docker import APIDocker
@@ -36,7 +38,7 @@ from .security import APISecurity
 from .services import APIServices
 from .store import APIStore
 from .supervisor import APISupervisor
-from .utils import api_process
+from .utils import api_process, api_process_raw

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -65,14 +67,19 @@ class RestAPI(CoreSysAttributes):
                 "max_field_size": MAX_LINE_SIZE,
             },
         )
-        attach_fast_url_dispatcher(self.webapp, FastUrlDispatcher())

         # service stuff
         self._runner: web.AppRunner = web.AppRunner(self.webapp, shutdown_timeout=5)
         self._site: web.TCPSite | None = None

+        # share single host API handler for reuse in logging endpoints
+        self._api_host: APIHost | None = None
+
     async def load(self) -> None:
         """Register REST API Calls."""
+        self._api_host = APIHost()
+        self._api_host.coresys = self.coresys
+
         self._register_addons()
         self._register_audio()
         self._register_auth()
@@ -102,10 +109,41 @@ class RestAPI(CoreSysAttributes):

         await self.start()

+    def _register_advanced_logs(self, path: str, syslog_identifier: str):
+        """Register logs endpoint for a given path, returning logs for single syslog identifier."""
+
+        self.webapp.add_routes(
+            [
+                web.get(
+                    f"{path}/logs",
+                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
+                ),
+                web.get(
+                    f"{path}/logs/follow",
+                    partial(
+                        self._api_host.advanced_logs,
+                        identifier=syslog_identifier,
+                        follow=True,
+                    ),
+                ),
+                web.get(
+                    f"{path}/logs/boots/{{bootid}}",
+                    partial(self._api_host.advanced_logs, identifier=syslog_identifier),
+                ),
+                web.get(
+                    f"{path}/logs/boots/{{bootid}}/follow",
+                    partial(
+                        self._api_host.advanced_logs,
+                        identifier=syslog_identifier,
+                        follow=True,
+                    ),
+                ),
+            ]
+        )
+
     def _register_host(self) -> None:
         """Register hostcontrol functions."""
-        api_host = APIHost()
-        api_host.coresys = self.coresys
+        api_host = self._api_host

         self.webapp.add_routes(
             [
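One _register_advanced_logs(path, identifier) call therefore registers four GET routes (logs, logs/follow, logs/boots/{bootid}, logs/boots/{bootid}/follow) sharing a single handler, with functools.partial pre-binding the syslog identifier and follow flag. A tiny runnable sketch of that binding:

import asyncio
from functools import partial

async def advanced_logs(request, identifier=None, follow=False):
    # Stand-in handler; aiohttp would pass the real request object
    return f"identifier={identifier} follow={follow}"

follow_handler = partial(advanced_logs, identifier="hassio_dns", follow=True)
print(asyncio.run(follow_handler(request=None)))  # identifier=hassio_dns follow=True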
@@ -182,6 +220,8 @@ class RestAPI(CoreSysAttributes):
                 web.post("/os/config/sync", api_os.config_sync),
                 web.post("/os/datadisk/move", api_os.migrate_data),
                 web.get("/os/datadisk/list", api_os.list_data),
+                web.post("/os/datadisk/wipe", api_os.wipe_data),
+                web.post("/os/boot-slot", api_os.set_boot_slot),
             ]
         )

@@ -219,6 +259,8 @@ class RestAPI(CoreSysAttributes):
                 web.get("/jobs/info", api_jobs.info),
                 web.post("/jobs/options", api_jobs.options),
                 web.post("/jobs/reset", api_jobs.reset),
+                web.get("/jobs/{uuid}", api_jobs.job_info),
+                web.delete("/jobs/{uuid}", api_jobs.remove_job),
             ]
         )

@@ -257,11 +299,11 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/multicast/info", api_multicast.info),
                 web.get("/multicast/stats", api_multicast.stats),
-                web.get("/multicast/logs", api_multicast.logs),
                 web.post("/multicast/update", api_multicast.update),
                 web.post("/multicast/restart", api_multicast.restart),
             ]
         )
+        self._register_advanced_logs("/multicast", "hassio_multicast")

     def _register_hardware(self) -> None:
         """Register hardware functions."""
@@ -334,6 +376,7 @@ class RestAPI(CoreSysAttributes):
                 web.post("/auth", api_auth.auth),
                 web.post("/auth/reset", api_auth.reset),
                 web.delete("/auth/cache", api_auth.cache),
+                web.get("/auth/list", api_auth.list_users),
             ]
         )

@@ -347,7 +390,6 @@ class RestAPI(CoreSysAttributes):
                 web.get("/supervisor/ping", api_supervisor.ping),
                 web.get("/supervisor/info", api_supervisor.info),
                 web.get("/supervisor/stats", api_supervisor.stats),
-                web.get("/supervisor/logs", api_supervisor.logs),
                 web.post("/supervisor/update", api_supervisor.update),
                 web.post("/supervisor/reload", api_supervisor.reload),
                 web.post("/supervisor/restart", api_supervisor.restart),
@@ -356,6 +398,39 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        async def get_supervisor_logs(*args, **kwargs):
+            try:
+                return await self._api_host.advanced_logs_handler(
+                    *args, identifier="hassio_supervisor", **kwargs
+                )
+            except Exception as err:  # pylint: disable=broad-exception-caught
+                # Supervisor logs are critical, so catch everything, log the exception
+                # and try to return Docker container logs as the fallback
+                _LOGGER.exception(
+                    "Failed to get supervisor logs using advanced_logs API"
+                )
+                if not isinstance(err, HostNotSupportedError):
+                    # No need to capture HostNotSupportedError to Sentry, the cause
+                    # is known and reported to the user using the resolution center.
+                    capture_exception(err)
+                kwargs.pop("follow", None)  # Follow is not supported for Docker logs
+                return await api_supervisor.logs(*args, **kwargs)
+
+        self.webapp.add_routes(
+            [
+                web.get("/supervisor/logs", get_supervisor_logs),
+                web.get(
+                    "/supervisor/logs/follow",
+                    partial(get_supervisor_logs, follow=True),
+                ),
+                web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
+                web.get(
+                    "/supervisor/logs/boots/{bootid}/follow",
+                    partial(get_supervisor_logs, follow=True),
+                ),
+            ]
+        )
+
     def _register_homeassistant(self) -> None:
         """Register Home Assistant functions."""
         api_hass = APIHomeAssistant()
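get_supervisor_logs() above is deliberately defensive: supervisor logs must stay reachable even when the journal-based host API fails, so any error falls back to plain Docker container logs, with follow stripped because the fallback cannot stream. A self-contained sketch of that shape (stub coroutines stand in for the real handlers):

import asyncio

async def journal_logs(**kwargs):
    raise RuntimeError("journal gateway unavailable")  # simulate failure

async def docker_logs(**kwargs):
    return b"docker container log tail"

async def logs_with_fallback(**kwargs):
    try:
        return await journal_logs(**kwargs)
    except Exception:
        kwargs.pop("follow", None)  # streaming unsupported on fallback path
        return await docker_logs(**kwargs)

print(asyncio.run(logs_with_fallback(follow=True)))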
@@ -364,7 +439,6 @@ class RestAPI(CoreSysAttributes):
         self.webapp.add_routes(
             [
                 web.get("/core/info", api_hass.info),
-                web.get("/core/logs", api_hass.logs),
                 web.get("/core/stats", api_hass.stats),
                 web.post("/core/options", api_hass.options),
                 web.post("/core/update", api_hass.update),
@@ -376,11 +450,12 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/core", "homeassistant")
+
         # Reroute from legacy
         self.webapp.add_routes(
             [
                 web.get("/homeassistant/info", api_hass.info),
-                web.get("/homeassistant/logs", api_hass.logs),
                 web.get("/homeassistant/stats", api_hass.stats),
                 web.post("/homeassistant/options", api_hass.options),
                 web.post("/homeassistant/restart", api_hass.restart),
@@ -392,6 +467,8 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/homeassistant", "homeassistant")
+
     def _register_proxy(self) -> None:
         """Register Home Assistant API Proxy."""
         api_proxy = APIProxy()
@@ -433,18 +510,39 @@ class RestAPI(CoreSysAttributes):
                 web.post("/addons/{addon}/stop", api_addons.stop),
                 web.post("/addons/{addon}/restart", api_addons.restart),
                 web.post("/addons/{addon}/options", api_addons.options),
+                web.post("/addons/{addon}/sys_options", api_addons.sys_options),
                 web.post(
                     "/addons/{addon}/options/validate", api_addons.options_validate
                 ),
                 web.get("/addons/{addon}/options/config", api_addons.options_config),
                 web.post("/addons/{addon}/rebuild", api_addons.rebuild),
-                web.get("/addons/{addon}/logs", api_addons.logs),
                 web.post("/addons/{addon}/stdin", api_addons.stdin),
                 web.post("/addons/{addon}/security", api_addons.security),
                 web.get("/addons/{addon}/stats", api_addons.stats),
             ]
         )

+        @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
+        async def get_addon_logs(request, *args, **kwargs):
+            addon = api_addons.get_addon_for_request(request)
+            kwargs["identifier"] = f"addon_{addon.slug}"
+            return await self._api_host.advanced_logs(request, *args, **kwargs)
+
+        self.webapp.add_routes(
+            [
+                web.get("/addons/{addon}/logs", get_addon_logs),
+                web.get(
+                    "/addons/{addon}/logs/follow",
+                    partial(get_addon_logs, follow=True),
+                ),
+                web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
+                web.get(
+                    "/addons/{addon}/logs/boots/{bootid}/follow",
+                    partial(get_addon_logs, follow=True),
+                ),
+            ]
+        )
+
         # Legacy routing to support requests for not installed addons
         api_store = APIStore()
         api_store.coresys = self.coresys
@@ -542,7 +640,6 @@ class RestAPI(CoreSysAttributes):
             [
                 web.get("/dns/info", api_dns.info),
                 web.get("/dns/stats", api_dns.stats),
-                web.get("/dns/logs", api_dns.logs),
                 web.post("/dns/update", api_dns.update),
                 web.post("/dns/options", api_dns.options),
                 web.post("/dns/restart", api_dns.restart),
@@ -550,18 +647,17 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/dns", "hassio_dns")
+
     def _register_audio(self) -> None:
         """Register Audio functions."""
         api_audio = APIAudio()
         api_audio.coresys = self.coresys
-        api_host = APIHost()
-        api_host.coresys = self.coresys

         self.webapp.add_routes(
             [
                 web.get("/audio/info", api_audio.info),
                 web.get("/audio/stats", api_audio.stats),
-                web.get("/audio/logs", api_audio.logs),
                 web.post("/audio/update", api_audio.update),
                 web.post("/audio/restart", api_audio.restart),
                 web.post("/audio/reload", api_audio.reload),
@@ -574,6 +670,8 @@ class RestAPI(CoreSysAttributes):
             ]
         )

+        self._register_advanced_logs("/audio", "hassio_audio")
+
     def _register_mounts(self) -> None:
         """Register mounts endpoints."""
         api_mounts = APIMounts()
@@ -600,7 +698,6 @@ class RestAPI(CoreSysAttributes):
                 web.get("/store", api_store.store_info),
                 web.get("/store/addons", api_store.addons_list),
                 web.get("/store/addons/{addon}", api_store.addons_addon_info),
-                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                 web.get("/store/addons/{addon}/icon", api_store.addons_addon_icon),
                 web.get("/store/addons/{addon}/logo", api_store.addons_addon_logo),
                 web.get(
@@ -622,6 +719,8 @@ class RestAPI(CoreSysAttributes):
                     "/store/addons/{addon}/update/{version}",
                     api_store.addons_addon_update,
                 ),
+                # Must be below others since it has a wildcard in resource path
+                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                 web.post("/store/reload", api_store.reload),
                 web.get("/store/repositories", api_store.repositories_list),
                 web.get(
@@ -1,4 +1,5 @@
 """Init file for Supervisor Home Assistant RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 import logging
@@ -81,6 +82,8 @@ from ..const import (
     ATTR_STARTUP,
     ATTR_STATE,
     ATTR_STDIN,
+    ATTR_SYSTEM_MANAGED,
+    ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
     ATTR_TRANSLATIONS,
     ATTR_UART,
     ATTR_UDEV,
@@ -95,6 +98,7 @@ from ..const import (
     ATTR_WEBUI,
     REQUEST_FROM,
     AddonBoot,
+    AddonBootConfig,
 )
 from ..coresys import CoreSysAttributes
 from ..docker.stats import DockerStats
@@ -102,12 +106,13 @@ from ..exceptions import (
     APIAddonNotInstalled,
     APIError,
     APIForbidden,
+    APINotFound,
     PwnedError,
     PwnedSecret,
 )
 from ..validate import docker_ports
-from .const import ATTR_SIGNED, CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate, json_loads
+from .const import ATTR_BOOT_CONFIG, ATTR_REMOVE_CONFIG, ATTR_SIGNED
+from .utils import api_process, api_validate, json_loads

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -126,15 +131,26 @@ SCHEMA_OPTIONS = vol.Schema(
     }
 )

-# pylint: disable=no-value-for-parameter
+SCHEMA_SYS_OPTIONS = vol.Schema(
+    {
+        vol.Optional(ATTR_SYSTEM_MANAGED): vol.Boolean(),
+        vol.Optional(ATTR_SYSTEM_MANAGED_CONFIG_ENTRY): vol.Maybe(str),
+    }
+)
+
 SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})

+SCHEMA_UNINSTALL = vol.Schema(
+    {vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
+)
+# pylint: enable=no-value-for-parameter
+

 class APIAddons(CoreSysAttributes):
     """Handle RESTful API for add-on functions."""

-    def _extract_addon(self, request: web.Request) -> Addon:
-        """Return addon, throw an exception it it doesn't exist."""
+    def get_addon_for_request(self, request: web.Request) -> Addon:
+        """Return addon, throw an exception if it doesn't exist."""
         addon_slug: str = request.match_info.get("addon")

         # Lookup itself
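SCHEMA_UNINSTALL above keeps the request body optional: an empty JSON object validates to remove_config=False, preserving the old API behaviour for existing clients. A quick check of that default (the literal key stands in for the ATTR_REMOVE_CONFIG constant):

import voluptuous as vol

SCHEMA_UNINSTALL = vol.Schema(
    {vol.Optional("remove_config", default=False): vol.Boolean()}
)

assert SCHEMA_UNINSTALL({}) == {"remove_config": False}
assert SCHEMA_UNINSTALL({"remove_config": "true"}) == {"remove_config": True}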
@@ -146,7 +162,7 @@ class APIAddons(CoreSysAttributes):

         addon = self.sys_addons.get(addon_slug)
         if not addon:
-            raise APIError(f"Addon {addon_slug} does not exist")
+            raise APINotFound(f"Addon {addon_slug} does not exist")
         if not isinstance(addon, Addon) or not addon.is_installed:
             raise APIAddonNotInstalled("Addon is not installed")

@@ -174,6 +190,7 @@ class APIAddons(CoreSysAttributes):
                 ATTR_URL: addon.url,
                 ATTR_ICON: addon.with_icon,
                 ATTR_LOGO: addon.with_logo,
+                ATTR_SYSTEM_MANAGED: addon.system_managed,
             }
             for addon in self.sys_addons.installed
         ]
@@ -187,7 +204,7 @@ class APIAddons(CoreSysAttributes):

     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return add-on information."""
-        addon: AnyAddon = self._extract_addon(request)
+        addon: AnyAddon = self.get_addon_for_request(request)

         data = {
             ATTR_NAME: addon.name,
@@ -202,6 +219,7 @@ class APIAddons(CoreSysAttributes):
             ATTR_VERSION_LATEST: addon.latest_version,
             ATTR_PROTECTED: addon.protected,
             ATTR_RATING: rating_security(addon),
+            ATTR_BOOT_CONFIG: addon.boot_config,
             ATTR_BOOT: addon.boot,
             ATTR_OPTIONS: addon.options,
             ATTR_SCHEMA: addon.schema_ui,
@@ -261,6 +279,8 @@ class APIAddons(CoreSysAttributes):
             ATTR_WATCHDOG: addon.watchdog,
             ATTR_DEVICES: addon.static_devices
             + [device.path for device in addon.devices],
+            ATTR_SYSTEM_MANAGED: addon.system_managed,
+            ATTR_SYSTEM_MANAGED_CONFIG_ENTRY: addon.system_managed_config_entry,
         }

         return data
@@ -268,7 +288,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def options(self, request: web.Request) -> None:
         """Store user options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)

         # Update secrets for validation
         await self.sys_homeassistant.secrets.reload()
@@ -283,6 +303,10 @@ class APIAddons(CoreSysAttributes):
         if ATTR_OPTIONS in body:
             addon.options = body[ATTR_OPTIONS]
         if ATTR_BOOT in body:
+            if addon.boot_config == AddonBootConfig.MANUAL_ONLY:
+                raise APIError(
+                    f"Addon {addon.slug} boot option is set to {addon.boot_config} so it cannot be changed"
+                )
             addon.boot = body[ATTR_BOOT]
         if ATTR_AUTO_UPDATE in body:
             addon.auto_update = body[ATTR_AUTO_UPDATE]
@@ -300,10 +324,24 @@ class APIAddons(CoreSysAttributes):

         addon.save_persist()

+    @api_process
+    async def sys_options(self, request: web.Request) -> None:
+        """Store system options for an add-on."""
+        addon = self.get_addon_for_request(request)
+
+        # Validate/Process Body
+        body = await api_validate(SCHEMA_SYS_OPTIONS, request)
+        if ATTR_SYSTEM_MANAGED in body:
+            addon.system_managed = body[ATTR_SYSTEM_MANAGED]
+        if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
+            addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]
+
+        addon.save_persist()
+
     @api_process
     async def options_validate(self, request: web.Request) -> None:
         """Validate user options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}

         options = await request.json(loads=json_loads) or addon.options
@@ -345,7 +383,7 @@ class APIAddons(CoreSysAttributes):
         slug: str = request.match_info.get("addon")
         if slug != "self":
             raise APIForbidden("This can be only read by the Add-on itself!")
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)

         # Lookup/reload secrets
         await self.sys_homeassistant.secrets.reload()
@@ -357,7 +395,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def security(self, request: web.Request) -> None:
         """Store security options for add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

         if ATTR_PROTECTED in body:
@@ -369,7 +407,7 @@ class APIAddons(CoreSysAttributes):
     @api_process
     async def stats(self, request: web.Request) -> dict[str, Any]:
         """Return resource information."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)

         stats: DockerStats = await addon.stats()

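The guard added to options() refuses boot changes when the add-on's config forces manual start. A sketch of the rule; the enum values are assumed from the MANUAL_ONLY and AUTO references in this diff, not taken from Supervisor's source:

from enum import StrEnum

class AddonBootConfig(StrEnum):  # assumed values, illustrative only
    AUTO = "auto"
    MANUAL = "manual"
    MANUAL_ONLY = "manual_only"

def set_boot(boot_config: AddonBootConfig, requested: str) -> str:
    # manual_only means the add-on forces manual start; the API refuses edits
    if boot_config == AddonBootConfig.MANUAL_ONLY:
        raise ValueError("boot option is forced by the add-on and cannot be changed")
    return requested

assert set_boot(AddonBootConfig.AUTO, "manual") == "manual"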
@@ -385,48 +423,47 @@ class APIAddons(CoreSysAttributes):
         }

     @api_process
-    def uninstall(self, request: web.Request) -> Awaitable[None]:
+    async def uninstall(self, request: web.Request) -> Awaitable[None]:
         """Uninstall add-on."""
-        addon = self._extract_addon(request)
-        return asyncio.shield(self.sys_addons.uninstall(addon.slug))
+        addon = self.get_addon_for_request(request)
+        body: dict[str, Any] = await api_validate(SCHEMA_UNINSTALL, request)
+        return await asyncio.shield(
+            self.sys_addons.uninstall(
+                addon.slug, remove_config=body[ATTR_REMOVE_CONFIG]
+            )
+        )

     @api_process
     async def start(self, request: web.Request) -> None:
         """Start add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         if start_task := await asyncio.shield(addon.start()):
             await start_task

     @api_process
     def stop(self, request: web.Request) -> Awaitable[None]:
         """Stop add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         return asyncio.shield(addon.stop())

     @api_process
     async def restart(self, request: web.Request) -> None:
         """Restart add-on."""
-        addon: Addon = self._extract_addon(request)
+        addon: Addon = self.get_addon_for_request(request)
         if start_task := await asyncio.shield(addon.restart()):
             await start_task

     @api_process
     async def rebuild(self, request: web.Request) -> None:
         """Rebuild local build add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
             await start_task

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return logs from add-on."""
-        addon = self._extract_addon(request)
-        return addon.logs()
-
     @api_process
     async def stdin(self, request: web.Request) -> None:
         """Write to stdin of add-on."""
-        addon = self._extract_addon(request)
+        addon = self.get_addon_for_request(request)
         if not addon.with_stdin:
             raise APIError(f"STDIN not supported the {addon.slug} add-on")

@@ -1,4 +1,5 @@
 """Init file for Supervisor Audio RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 from dataclasses import asdict
@@ -35,8 +36,7 @@ from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..host.sound import StreamType
 from ..validate import version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -111,11 +111,6 @@ class APIAudio(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.audio.update(version))

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Audio Docker logs."""
-        return self.sys_plugins.audio.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart Audio plugin."""
@@ -1,6 +1,8 @@
 """Init file for Supervisor auth/SSO RESTful API."""
+
 import asyncio
 import logging
+from typing import Any

 from aiohttp import BasicAuth, web
 from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
@@ -8,11 +10,19 @@ from aiohttp.web_exceptions import HTTPUnauthorized
 import voluptuous as vol

 from ..addons.addon import Addon
-from ..const import ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
+from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIForbidden
 from ..utils.json import json_loads
-from .const import CONTENT_TYPE_JSON, CONTENT_TYPE_URL
+from .const import (
+    ATTR_GROUP_IDS,
+    ATTR_IS_ACTIVE,
+    ATTR_IS_OWNER,
+    ATTR_LOCAL_ONLY,
+    ATTR_USERS,
+    CONTENT_TYPE_JSON,
+    CONTENT_TYPE_URL,
+)
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -90,3 +100,21 @@ class APIAuth(CoreSysAttributes):
     async def cache(self, request: web.Request) -> None:
         """Process cache reset request."""
         self.sys_auth.reset_data()
+
+    @api_process
+    async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
+        """List users on the Home Assistant instance."""
+        return {
+            ATTR_USERS: [
+                {
+                    ATTR_USERNAME: user[ATTR_USERNAME],
+                    ATTR_NAME: user[ATTR_NAME],
+                    ATTR_IS_OWNER: user[ATTR_IS_OWNER],
+                    ATTR_IS_ACTIVE: user[ATTR_IS_ACTIVE],
+                    ATTR_LOCAL_ONLY: user[ATTR_LOCAL_ONLY],
+                    ATTR_GROUP_IDS: user[ATTR_GROUP_IDS],
+                }
+                for user in await self.sys_auth.list_users()
+                if user[ATTR_USERNAME]
+            ]
+        }
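list_users() above filters out entries without a username (system and internal users) before shaping the payload. A reduced sketch of that filtering:

users = [
    {"username": "anna", "name": "Anna", "is_owner": True},
    {"username": None, "name": "Home Assistant Cloud", "is_owner": False},
]
payload = {"users": [u for u in users if u["username"]]}
assert [u["name"] for u in payload["users"]] == ["Anna"]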
@@ -1,5 +1,9 @@
 """Backups RESTful API."""
+
+from __future__ import annotations
+
 import asyncio
+from collections.abc import Callable
 import errno
 import logging
 from pathlib import Path
@@ -10,7 +14,10 @@ from typing import Any
 from aiohttp import web
 from aiohttp.hdrs import CONTENT_DISPOSITION
 import voluptuous as vol
+from voluptuous.humanize import humanize_error

+from ..backups.backup import Backup
+from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE
 from ..backups.validate import ALL_FOLDERS, FOLDER_HOMEASSISTANT, days_until_stale
 from ..const import (
     ATTR_ADDONS,
@@ -19,77 +26,118 @@ from ..const import (
     ATTR_CONTENT,
     ATTR_DATE,
     ATTR_DAYS_UNTIL_STALE,
+    ATTR_EXTRA,
+    ATTR_FILENAME,
     ATTR_FOLDERS,
     ATTR_HOMEASSISTANT,
     ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
-    ATTR_LOCATON,
+    ATTR_JOB_ID,
+    ATTR_LOCATION,
     ATTR_NAME,
     ATTR_PASSWORD,
+    ATTR_PATH,
     ATTR_PROTECTED,
     ATTR_REPOSITORIES,
     ATTR_SIZE,
+    ATTR_SIZE_BYTES,
     ATTR_SLUG,
     ATTR_SUPERVISOR_VERSION,
     ATTR_TIMEOUT,
     ATTR_TYPE,
     ATTR_VERSION,
+    REQUEST_FROM,
+    BusEvent,
+    CoreState,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError
+from ..exceptions import APIError, APIForbidden, APINotFound
+from ..jobs import JobSchedulerOptions
 from ..mounts.const import MountUsage
 from ..resolution.const import UnhealthyReason
-from .const import CONTENT_TYPE_TAR
+from .const import (
+    ATTR_ADDITIONAL_LOCATIONS,
+    ATTR_BACKGROUND,
+    ATTR_LOCATION_ATTRIBUTES,
+    ATTR_LOCATIONS,
+    CONTENT_TYPE_TAR,
+)
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

+ALL_ADDONS_FLAG = "ALL"
+
+LOCATION_LOCAL = ".local"
+
 RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
+RE_BACKUP_FILENAME = re.compile(r"^[^\\\/]+\.tar$")

 # Backwards compatible
 # Remove: 2022.08
 _ALL_FOLDERS = ALL_FOLDERS + [FOLDER_HOMEASSISTANT]


+def _ensure_list(item: Any) -> list:
+    """Ensure value is a list."""
+    if not isinstance(item, list):
+        return [item]
+    return item
+
+
+def _convert_local_location(item: str | None) -> str | None:
+    """Convert local location value."""
+    if item in {LOCATION_LOCAL, ""}:
+        return None
+    return item
+
+
# pylint: disable=no-value-for-parameter
|
# pylint: disable=no-value-for-parameter
|
||||||
SCHEMA_RESTORE_PARTIAL = vol.Schema(
|
SCHEMA_FOLDERS = vol.All([vol.In(_ALL_FOLDERS)], vol.Unique())
|
||||||
|
SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
|
||||||
|
SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())
|
||||||
|
|
||||||
|
SCHEMA_RESTORE_FULL = vol.Schema(
|
||||||
{
|
{
|
||||||
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
|
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
|
||||||
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
|
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
|
||||||
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
|
vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION,
|
||||||
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
|
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})
|
SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
|
||||||
|
{
|
||||||
|
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
|
||||||
|
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
SCHEMA_BACKUP_FULL = vol.Schema(
|
SCHEMA_BACKUP_FULL = vol.Schema(
|
||||||
{
|
{
|
||||||
vol.Optional(ATTR_NAME): str,
|
vol.Optional(ATTR_NAME): str,
|
||||||
|
vol.Optional(ATTR_FILENAME): vol.Match(RE_BACKUP_FILENAME),
|
||||||
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
|
vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
|
||||||
vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
|
vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
|
||||||
vol.Optional(ATTR_LOCATON): vol.Maybe(str),
|
vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST,
|
||||||
vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
|
vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
|
||||||
|
vol.Optional(ATTR_EXTRA): dict,
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
|
SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
|
||||||
{
|
{
|
||||||
vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
|
vol.Optional(ATTR_ADDONS): vol.Or(
|
||||||
vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
|
ALL_ADDONS_FLAG, vol.All([str], vol.Unique())
|
||||||
|
),
|
||||||
|
vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
|
||||||
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
|
vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
|
||||||
}
|
}
|
||||||
)
|
)
|
||||||
|
|
||||||
SCHEMA_OPTIONS = vol.Schema(
|
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale})
|
||||||
{
|
SCHEMA_FREEZE = vol.Schema({vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1))})
|
||||||
vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale,
|
SCHEMA_REMOVE = vol.Schema({vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST})
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
SCHEMA_FREEZE = vol.Schema(
|
|
||||||
{
|
|
||||||
vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
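The location validators defined above do the heavy lifting for the handler methods in the rest of this hunk: _ensure_list lets callers pass either one location or a list, and _convert_local_location folds the ".local" marker and the empty string into None (local storage). A minimal sketch of how the composed SCHEMA_LOCATION_LIST behaves under plain voluptuous semantics, reusing the definitions from this hunk (the standalone assertions are illustrative, not part of the change):

import voluptuous as vol

LOCATION_LOCAL = ".local"

def _ensure_list(item):
    """Wrap a scalar value into a single-element list."""
    if not isinstance(item, list):
        return [item]
    return item

def _convert_local_location(item):
    """Map the ".local" marker and the empty string to None (local storage)."""
    if item in {LOCATION_LOCAL, ""}:
        return None
    return item

SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())

assert SCHEMA_LOCATION_LIST("my_nas") == ["my_nas"]            # scalar becomes a list
assert SCHEMA_LOCATION_LIST([".local", "my_nas"]) == [None, "my_nas"]
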
 class APIBackups(CoreSysAttributes):
@@ -99,9 +147,19 @@ class APIBackups(CoreSysAttributes):
         """Return backup, throw an exception if it doesn't exist."""
         backup = self.sys_backups.get(request.match_info.get("slug"))
         if not backup:
-            raise APIError("Backup does not exist")
+            raise APINotFound("Backup does not exist")
         return backup
 
+    def _make_location_attributes(self, backup: Backup) -> dict[str, dict[str, Any]]:
+        """Make location attributes dictionary."""
+        return {
+            loc if loc else LOCATION_LOCAL: {
+                ATTR_PROTECTED: backup.all_locations[loc][ATTR_PROTECTED],
+                ATTR_SIZE_BYTES: backup.all_locations[loc][ATTR_SIZE_BYTES],
+            }
+            for loc in backup.locations
+        }
+
     def _list_backups(self):
         """Return list of backups."""
         return [
@@ -111,8 +169,11 @@ class APIBackups(CoreSysAttributes):
                 ATTR_DATE: backup.date,
                 ATTR_TYPE: backup.sys_type,
                 ATTR_SIZE: backup.size,
-                ATTR_LOCATON: backup.location,
+                ATTR_SIZE_BYTES: backup.size_bytes,
+                ATTR_LOCATION: backup.location,
+                ATTR_LOCATIONS: backup.locations,
                 ATTR_PROTECTED: backup.protected,
+                ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
                 ATTR_COMPRESSED: backup.compressed,
                 ATTR_CONTENT: {
                     ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
@@ -121,6 +182,7 @@ class APIBackups(CoreSysAttributes):
                 },
             }
             for backup in self.sys_backups.list_backups
+            if backup.location != LOCATION_CLOUD_BACKUP
         ]
 
     @api_process
@@ -180,104 +242,283 @@ class APIBackups(CoreSysAttributes):
             ATTR_NAME: backup.name,
             ATTR_DATE: backup.date,
             ATTR_SIZE: backup.size,
+            ATTR_SIZE_BYTES: backup.size_bytes,
             ATTR_COMPRESSED: backup.compressed,
             ATTR_PROTECTED: backup.protected,
+            ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
             ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
             ATTR_HOMEASSISTANT: backup.homeassistant_version,
-            ATTR_LOCATON: backup.location,
+            ATTR_LOCATION: backup.location,
+            ATTR_LOCATIONS: backup.locations,
             ATTR_ADDONS: data_addons,
             ATTR_REPOSITORIES: backup.repositories,
             ATTR_FOLDERS: backup.folders,
             ATTR_HOMEASSISTANT_EXCLUDE_DATABASE: backup.homeassistant_exclude_database,
+            ATTR_EXTRA: backup.extra,
         }
 
-    def _location_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
-        """Change location field to mount if necessary."""
-        if not body.get(ATTR_LOCATON):
-            return body
+    def _location_to_mount(self, location: str | None) -> LOCATION_TYPE:
+        """Convert a single location to a mount if possible."""
+        if not location or location == LOCATION_CLOUD_BACKUP:
+            return location
 
-        body[ATTR_LOCATON] = self.sys_mounts.get(body[ATTR_LOCATON])
-        if body[ATTR_LOCATON].usage != MountUsage.BACKUP:
+        mount = self.sys_mounts.get(location)
+        if mount.usage != MountUsage.BACKUP:
             raise APIError(
-                f"Mount {body[ATTR_LOCATON].name} is not used for backups, cannot backup to there"
+                f"Mount {mount.name} is not used for backups, cannot backup to there"
            )
 
+        return mount
+
+    def _location_field_to_mount(self, body: dict[str, Any]) -> dict[str, Any]:
+        """Change location field to mount if necessary."""
+        body[ATTR_LOCATION] = self._location_to_mount(body.get(ATTR_LOCATION))
         return body
 
+    def _validate_cloud_backup_location(
+        self, request: web.Request, location: list[str | None] | str | None
+    ) -> None:
+        """Cloud backup location is only available to Home Assistant."""
+        if not isinstance(location, list):
+            location = [location]
+        if (
+            LOCATION_CLOUD_BACKUP in location
+            and request.get(REQUEST_FROM) != self.sys_homeassistant
+        ):
+            raise APIForbidden(
+                f"Location {LOCATION_CLOUD_BACKUP} is only available for Home Assistant"
+            )
+
+    async def _background_backup_task(
+        self, backup_method: Callable, *args, **kwargs
+    ) -> tuple[asyncio.Task, str]:
+        """Start backup task in background and return task and job ID."""
+        event = asyncio.Event()
+        job, backup_task = self.sys_jobs.schedule_job(
+            backup_method, JobSchedulerOptions(), *args, **kwargs
+        )
+
+        async def release_on_freeze(new_state: CoreState):
+            if new_state == CoreState.FREEZE:
+                event.set()
+
+        # Wait for system to get into freeze state before returning
+        # If the backup fails validation it will raise before getting there
+        listener = self.sys_bus.register_event(
+            BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
+        )
+        try:
+            event_task = self.sys_create_task(event.wait())
+            _, pending = await asyncio.wait(
+                (
+                    backup_task,
+                    event_task,
+                ),
+                return_when=asyncio.FIRST_COMPLETED,
+            )
+            # It seems backup returned early (error or something), make sure to cancel
+            # the event task to avoid "Task was destroyed but it is pending!" errors.
+            if event_task in pending:
+                event_task.cancel()
+            return (backup_task, job.uuid)
+        finally:
+            self.sys_bus.remove_listener(listener)
 
     @api_process
-    async def backup_full(self, request):
+    async def backup_full(self, request: web.Request):
         """Create full backup."""
         body = await api_validate(SCHEMA_BACKUP_FULL, request)
+        locations: list[LOCATION_TYPE] | None = None
 
-        backup = await asyncio.shield(
-            self.sys_backups.do_backup_full(**self._location_to_mount(body))
+        if ATTR_LOCATION in body:
+            location_names: list[str | None] = body.pop(ATTR_LOCATION)
+            self._validate_cloud_backup_location(request, location_names)
+
+            locations = [
+                self._location_to_mount(location) for location in location_names
+            ]
+            body[ATTR_LOCATION] = locations.pop(0)
+            if locations:
+                body[ATTR_ADDITIONAL_LOCATIONS] = locations
+
+        background = body.pop(ATTR_BACKGROUND)
+        backup_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_backup_full, **body
         )
 
+        if background and not backup_task.done():
+            return {ATTR_JOB_ID: job_id}
+
+        backup: Backup = await backup_task
         if backup:
-            return {ATTR_SLUG: backup.slug}
-        return False
+            return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
+        raise APIError(
+            f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )
 
     @api_process
-    async def backup_partial(self, request):
+    async def backup_partial(self, request: web.Request):
         """Create a partial backup."""
         body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
-        backup = await asyncio.shield(
-            self.sys_backups.do_backup_partial(**self._location_to_mount(body))
+        locations: list[LOCATION_TYPE] | None = None
+
+        if ATTR_LOCATION in body:
+            location_names: list[str | None] = body.pop(ATTR_LOCATION)
+            self._validate_cloud_backup_location(request, location_names)
+
+            locations = [
+                self._location_to_mount(location) for location in location_names
+            ]
+            body[ATTR_LOCATION] = locations.pop(0)
+            if locations:
+                body[ATTR_ADDITIONAL_LOCATIONS] = locations
+
+        if body.get(ATTR_ADDONS) == ALL_ADDONS_FLAG:
+            body[ATTR_ADDONS] = list(self.sys_addons.local)
+
+        background = body.pop(ATTR_BACKGROUND)
+        backup_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_backup_partial, **body
         )
 
+        if background and not backup_task.done():
+            return {ATTR_JOB_ID: job_id}
+
+        backup: Backup = await backup_task
         if backup:
-            return {ATTR_SLUG: backup.slug}
-        return False
+            return {ATTR_JOB_ID: job_id, ATTR_SLUG: backup.slug}
+        raise APIError(
+            f"An error occurred while making backup, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )
 
     @api_process
-    async def restore_full(self, request):
+    async def restore_full(self, request: web.Request):
         """Full restore of a backup."""
         backup = self._extract_slug(request)
         body = await api_validate(SCHEMA_RESTORE_FULL, request)
+        self._validate_cloud_backup_location(
+            request, body.get(ATTR_LOCATION, backup.location)
+        )
+        background = body.pop(ATTR_BACKGROUND)
+        restore_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_restore_full, backup, **body
+        )
 
-        return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))
+        if background and not restore_task.done() or await restore_task:
+            return {ATTR_JOB_ID: job_id}
+        raise APIError(
+            f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )
 
     @api_process
-    async def restore_partial(self, request):
+    async def restore_partial(self, request: web.Request):
         """Partial restore a backup."""
         backup = self._extract_slug(request)
         body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
+        self._validate_cloud_backup_location(
+            request, body.get(ATTR_LOCATION, backup.location)
+        )
+        background = body.pop(ATTR_BACKGROUND)
+        restore_task, job_id = await self._background_backup_task(
+            self.sys_backups.do_restore_partial, backup, **body
        )
 
-        return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))
+        if background and not restore_task.done() or await restore_task:
+            return {ATTR_JOB_ID: job_id}
+        raise APIError(
+            f"An error occurred during restore of {backup.slug}, check job '{job_id}' or supervisor logs for details",
+            job_id=job_id,
+        )
 
     @api_process
-    async def freeze(self, request):
+    async def freeze(self, request: web.Request):
         """Initiate manual freeze for external backup."""
         body = await api_validate(SCHEMA_FREEZE, request)
         await asyncio.shield(self.sys_backups.freeze_all(**body))
 
     @api_process
-    async def thaw(self, request):
+    async def thaw(self, request: web.Request):
         """Begin thaw after manual freeze."""
         await self.sys_backups.thaw_all()
 
     @api_process
-    async def remove(self, request):
+    async def remove(self, request: web.Request):
         """Remove a backup."""
         backup = self._extract_slug(request)
-        return self.sys_backups.remove(backup)
+        body = await api_validate(SCHEMA_REMOVE, request)
+        locations: list[LOCATION_TYPE] | None = None
 
-    async def download(self, request):
+        if ATTR_LOCATION in body:
+            self._validate_cloud_backup_location(request, body[ATTR_LOCATION])
+            locations = [self._location_to_mount(name) for name in body[ATTR_LOCATION]]
+        else:
+            self._validate_cloud_backup_location(request, backup.location)
+
+        await self.sys_backups.remove(backup, locations=locations)
+
+    @api_process
+    async def download(self, request: web.Request):
         """Download a backup file."""
         backup = self._extract_slug(request)
+        # Query will give us '' for /backups, convert value to None
+        location = _convert_local_location(
+            request.query.get(ATTR_LOCATION, backup.location)
+        )
+        self._validate_cloud_backup_location(request, location)
+        if location not in backup.all_locations:
+            raise APIError(f"Backup {backup.slug} is not in location {location}")
 
         _LOGGER.info("Downloading backup %s", backup.slug)
-        response = web.FileResponse(backup.tarfile)
+        filename = backup.all_locations[location][ATTR_PATH]
+        # If the file is missing, return 404 and trigger reload of location
+        if not filename.is_file():
+            self.sys_create_task(self.sys_backups.reload(location))
+            return web.Response(status=404)
+
+        response = web.FileResponse(filename)
         response.content_type = CONTENT_TYPE_TAR
-        response.headers[
-            CONTENT_DISPOSITION
-        ] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
+
+        download_filename = filename.name
+        if download_filename == f"{backup.slug}.tar":
+            download_filename = f"{RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
+        response.headers[CONTENT_DISPOSITION] = (
+            f"attachment; filename={download_filename}"
+        )
         return response
 
     @api_process
-    async def upload(self, request):
+    async def upload(self, request: web.Request):
         """Upload a backup file."""
-        with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
+        location: LOCATION_TYPE = None
+        locations: list[LOCATION_TYPE] | None = None
+        tmp_path = self.sys_config.path_tmp
+        if ATTR_LOCATION in request.query:
+            location_names: list[str] = request.query.getall(ATTR_LOCATION)
+            self._validate_cloud_backup_location(request, location_names)
+            # Convert empty string to None if necessary
+            locations = [
+                self._location_to_mount(location)
+                if _convert_local_location(location)
+                else None
+                for location in location_names
+            ]
+            location = locations.pop(0)
+
+            if location and location != LOCATION_CLOUD_BACKUP:
+                tmp_path = location.local_where
+
+        filename: str | None = None
+        if ATTR_FILENAME in request.query:
+            filename = request.query.get(ATTR_FILENAME)
+            try:
+                vol.Match(RE_BACKUP_FILENAME)(filename)
+            except vol.Invalid as ex:
+                raise APIError(humanize_error(filename, ex)) from None
+
+        with TemporaryDirectory(dir=tmp_path.as_posix()) as temp_dir:
             tar_file = Path(temp_dir, "backup.tar")
             reader = await request.multipart()
             contents = await reader.next()
@@ -290,7 +531,10 @@ class APIBackups(CoreSysAttributes):
                         backup.write(chunk)
 
             except OSError as err:
-                if err.errno == errno.EBADMSG:
+                if err.errno == errno.EBADMSG and location in {
+                    LOCATION_CLOUD_BACKUP,
+                    None,
+                }:
                     self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
                 _LOGGER.error("Can't write new backup file: %s", err)
                 return False
@@ -298,7 +542,14 @@ class APIBackups(CoreSysAttributes):
             except asyncio.CancelledError:
                 return False
 
-        backup = await asyncio.shield(self.sys_backups.import_backup(tar_file))
+        backup = await asyncio.shield(
+            self.sys_backups.import_backup(
+                tar_file,
+                filename,
+                location=location,
+                additional_locations=locations,
+            )
+        )
 
         if backup:
             return {ATTR_SLUG: backup.slug}

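With background in the schemas and the job-based handlers above, a backup request can return a job ID immediately instead of blocking until the tarball is written. A hedged sketch of a client driving this, assuming the usual Supervisor endpoint paths (/backups/new/full, /jobs/{uuid}) and the {"data": ...} response envelope — conventions of the Supervisor REST API rather than anything shown in this diff:

import asyncio

import aiohttp

SUPERVISOR = "http://supervisor"  # assumption: the in-network Supervisor host

async def full_backup_in_background(token: str) -> None:
    headers = {"Authorization": f"Bearer {token}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        # background=true makes the API answer as soon as the freeze starts
        async with session.post(
            f"{SUPERVISOR}/backups/new/full",
            json={"name": "nightly", "background": True},
        ) as resp:
            job_id = (await resp.json())["data"]["job_id"]

        # Poll the job endpoint added by this series until the job reports done
        while True:
            async with session.get(f"{SUPERVISOR}/jobs/{job_id}") as resp:
                if (await resp.json())["data"].get("done"):
                    break
            await asyncio.sleep(5)

asyncio.run(full_backup_in_background("SUPERVISOR_TOKEN"))  # hypothetical token
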
@@ -1,4 +1,5 @@
 """Init file for Supervisor HA cli RESTful API."""
+
 import asyncio
 import logging
 from typing import Any

@@ -1,18 +1,26 @@
 """Const for API."""
+
+from enum import StrEnum
 
 CONTENT_TYPE_BINARY = "application/octet-stream"
 CONTENT_TYPE_JSON = "application/json"
 CONTENT_TYPE_PNG = "image/png"
 CONTENT_TYPE_TAR = "application/tar"
 CONTENT_TYPE_TEXT = "text/plain"
 CONTENT_TYPE_URL = "application/x-www-form-urlencoded"
+CONTENT_TYPE_X_LOG = "text/x-log"
 
 COOKIE_INGRESS = "ingress_session"
 
+ATTR_ADDITIONAL_LOCATIONS = "additional_locations"
 ATTR_AGENT_VERSION = "agent_version"
 ATTR_APPARMOR_VERSION = "apparmor_version"
 ATTR_ATTRIBUTES = "attributes"
 ATTR_AVAILABLE_UPDATES = "available_updates"
+ATTR_BACKGROUND = "background"
+ATTR_BOOT_CONFIG = "boot_config"
+ATTR_BOOT_SLOT = "boot_slot"
+ATTR_BOOT_SLOTS = "boot_slots"
 ATTR_BOOT_TIMESTAMP = "boot_timestamp"
 ATTR_BOOTS = "boots"
 ATTR_BROADCAST_LLMNR = "broadcast_llmnr"
@@ -30,25 +38,45 @@ ATTR_DT_UTC = "dt_utc"
 ATTR_EJECTABLE = "ejectable"
 ATTR_FALLBACK = "fallback"
 ATTR_FILESYSTEMS = "filesystems"
+ATTR_FORCE = "force"
+ATTR_GROUP_IDS = "group_ids"
 ATTR_IDENTIFIERS = "identifiers"
+ATTR_IS_ACTIVE = "is_active"
+ATTR_IS_OWNER = "is_owner"
 ATTR_JOBS = "jobs"
 ATTR_LLMNR = "llmnr"
 ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
+ATTR_LOCAL_ONLY = "local_only"
+ATTR_LOCATION_ATTRIBUTES = "location_attributes"
+ATTR_LOCATIONS = "locations"
 ATTR_MDNS = "mdns"
 ATTR_MODEL = "model"
 ATTR_MOUNTS = "mounts"
 ATTR_MOUNT_POINTS = "mount_points"
 ATTR_PANEL_PATH = "panel_path"
 ATTR_REMOVABLE = "removable"
+ATTR_REMOVE_CONFIG = "remove_config"
 ATTR_REVISION = "revision"
+ATTR_SAFE_MODE = "safe_mode"
 ATTR_SEAT = "seat"
 ATTR_SIGNED = "signed"
 ATTR_STARTUP_TIME = "startup_time"
+ATTR_STATUS = "status"
 ATTR_SUBSYSTEM = "subsystem"
 ATTR_SYSFS = "sysfs"
 ATTR_SYSTEM_HEALTH_LED = "system_health_led"
 ATTR_TIME_DETECTED = "time_detected"
 ATTR_UPDATE_TYPE = "update_type"
-ATTR_USE_NTP = "use_ntp"
 ATTR_USAGE = "usage"
+ATTR_USE_NTP = "use_ntp"
+ATTR_USERS = "users"
+ATTR_USER_PATH = "user_path"
 ATTR_VENDOR = "vendor"
+ATTR_VIRTUALIZATION = "virtualization"
+
+
+class BootSlot(StrEnum):
+    """Boot slots used by HAOS."""
+
+    A = "A"
+    B = "B"

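BootSlot derives from StrEnum, so its members compare and serialize as plain strings, which is why the API layer can drop them straight into JSON payloads; a quick illustration of that behavior (standard Python 3.11+ semantics, not part of the change):

from enum import StrEnum

class BootSlot(StrEnum):
    """Boot slots used by HAOS."""
    A = "A"
    B = "B"

assert BootSlot.A == "A"                  # StrEnum members equal their string value
assert f"slot={BootSlot.B}" == "slot=B"   # and format as plain strings
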
@@ -1,4 +1,5 @@
 """Init file for Supervisor network RESTful API."""
+
 import logging
 
 import voluptuous as vol
@@ -15,8 +16,7 @@ from ..const import (
     AddonState,
 )
 from ..coresys import CoreSysAttributes
-from ..discovery.validate import valid_discovery_service
-from ..exceptions import APIError, APIForbidden
+from ..exceptions import APIForbidden, APINotFound
 from .utils import api_process, api_validate, require_home_assistant
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -24,7 +24,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 SCHEMA_DISCOVERY = vol.Schema(
     {
         vol.Required(ATTR_SERVICE): str,
-        vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
+        vol.Required(ATTR_CONFIG): dict,
     }
 )
@@ -36,7 +36,7 @@ class APIDiscovery(CoreSysAttributes):
         """Extract discovery message from URL."""
         message = self.sys_discovery.get(request.match_info.get("uuid"))
         if not message:
-            raise APIError("Discovery message not found")
+            raise APINotFound("Discovery message not found")
         return message
 
     @api_process
@@ -71,15 +71,6 @@ class APIDiscovery(CoreSysAttributes):
         addon: Addon = request[REQUEST_FROM]
         service = body[ATTR_SERVICE]
-
-        try:
-            valid_discovery_service(service)
-        except vol.Invalid:
-            _LOGGER.warning(
-                "Received discovery message for unknown service %s from addon %s. Please report this to the maintainer of the add-on",
-                service,
-                addon.name,
-            )
 
         # Access?
         if body[ATTR_SERVICE] not in addon.discovery:
             _LOGGER.error(

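The schema change above tightens config from an optional, nullable dict to a required one; a minimal before/after check, using placeholder values for the two attribute constants imported by the module:

import voluptuous as vol

ATTR_SERVICE = "service"   # placeholder for the constant imported in the module
ATTR_CONFIG = "config"

SCHEMA_DISCOVERY = vol.Schema(
    {
        vol.Required(ATTR_SERVICE): str,
        vol.Required(ATTR_CONFIG): dict,
    }
)

SCHEMA_DISCOVERY({"service": "mqtt", "config": {"host": "core-mosquitto"}})  # ok

try:
    SCHEMA_DISCOVERY({"service": "mqtt"})  # previously valid, now rejected
except vol.Invalid as err:
    print(f"rejected: {err}")
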
@@ -1,4 +1,5 @@
 """Init file for Supervisor DNS RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 import logging
@@ -26,8 +27,8 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import dns_server_list, version_tag
-from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS, CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .const import ATTR_FALLBACK, ATTR_LLMNR, ATTR_MDNS
+from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -105,11 +106,6 @@ class APICoreDNS(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.dns.update(version))
 
-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return DNS Docker logs."""
-        return self.sys_plugins.dns.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart CoreDNS plugin."""

@@ -1,4 +1,5 @@
 """Init file for Supervisor Home Assistant RESTful API."""
+
 import logging
 from typing import Any
 
@@ -15,6 +16,7 @@ from ..const import (
     ATTR_VERSION,
 )
 from ..coresys import CoreSysAttributes
+from ..exceptions import APINotFound
 from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
@@ -57,6 +59,9 @@ class APIDocker(CoreSysAttributes):
     async def remove_registry(self, request: web.Request):
         """Delete a docker registry."""
         hostname = request.match_info.get(ATTR_HOSTNAME)
+        if hostname not in self.sys_docker.config.registries:
+            raise APINotFound(f"Hostname {hostname} does not exist in registries")
+
         del self.sys_docker.config.registries[hostname]
         self.sys_docker.config.save_data()

@@ -1,4 +1,5 @@
 """Init file for Supervisor hardware RESTful API."""
+
 import logging
 from typing import Any
 
@@ -16,7 +17,7 @@ from ..const import (
     ATTR_SYSTEM,
 )
 from ..coresys import CoreSysAttributes
-from ..dbus.udisks2 import UDisks2
+from ..dbus.udisks2 import UDisks2Manager
 from ..dbus.udisks2.block import UDisks2Block
 from ..dbus.udisks2.drive import UDisks2Drive
 from ..hardware.data import Device
@@ -72,7 +73,7 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
     }
 
 
-def drive_struct(udisks2: UDisks2, drive: UDisks2Drive) -> dict[str, Any]:
+def drive_struct(udisks2: UDisks2Manager, drive: UDisks2Drive) -> dict[str, Any]:
     """Return a dict with information of a disk to be used in the API."""
     return {
         ATTR_VENDOR: drive.vendor,

@@ -1,4 +1,5 @@
 """Init file for Supervisor Home Assistant RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 import logging
@@ -34,10 +35,10 @@ from ..const import (
     ATTR_WATCHDOG,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError
+from ..exceptions import APIDBMigrationInProgress, APIError
 from ..validate import docker_image, network_port, version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .const import ATTR_FORCE, ATTR_SAFE_MODE
+from .utils import api_process, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -63,10 +64,34 @@ SCHEMA_UPDATE = vol.Schema(
     }
 )
 
+SCHEMA_RESTART = vol.Schema(
+    {
+        vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
+        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
+    }
+)
+
+SCHEMA_STOP = vol.Schema(
+    {
+        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
+    }
+)
+
 
 class APIHomeAssistant(CoreSysAttributes):
     """Handle RESTful API for Home Assistant functions."""
 
+    async def _check_offline_migration(self, force: bool = False) -> None:
+        """Check and raise if there's an offline DB migration in progress."""
+        if (
+            not force
+            and (state := await self.sys_homeassistant.api.get_api_state())
+            and state.offline_db_migration
+        ):
+            raise APIDBMigrationInProgress(
+                "Offline database migration in progress, try again after it has completed"
+            )
+
     @api_process
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return host information."""
@@ -94,6 +119,9 @@ class APIHomeAssistant(CoreSysAttributes):
 
         if ATTR_IMAGE in body:
             self.sys_homeassistant.image = body[ATTR_IMAGE]
+            self.sys_homeassistant.override_image = (
+                self.sys_homeassistant.image != self.sys_homeassistant.default_image
+            )
 
         if ATTR_BOOT in body:
             self.sys_homeassistant.boot = body[ATTR_BOOT]
@@ -145,6 +173,7 @@ class APIHomeAssistant(CoreSysAttributes):
     async def update(self, request: web.Request) -> None:
         """Update Home Assistant."""
         body = await api_validate(SCHEMA_UPDATE, request)
+        await self._check_offline_migration()
 
         await asyncio.shield(
             self.sys_homeassistant.core.update(
@@ -154,9 +183,12 @@ class APIHomeAssistant(CoreSysAttributes):
         )
 
     @api_process
-    def stop(self, request: web.Request) -> Awaitable[None]:
+    async def stop(self, request: web.Request) -> Awaitable[None]:
         """Stop Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.core.stop())
+        body = await api_validate(SCHEMA_STOP, request)
+        await self._check_offline_migration(force=body[ATTR_FORCE])
+
+        return await asyncio.shield(self.sys_homeassistant.core.stop())
 
     @api_process
     def start(self, request: web.Request) -> Awaitable[None]:
@@ -164,19 +196,24 @@ class APIHomeAssistant(CoreSysAttributes):
         return asyncio.shield(self.sys_homeassistant.core.start())
 
     @api_process
-    def restart(self, request: web.Request) -> Awaitable[None]:
+    async def restart(self, request: web.Request) -> None:
         """Restart Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.core.restart())
+        body = await api_validate(SCHEMA_RESTART, request)
+        await self._check_offline_migration(force=body[ATTR_FORCE])
+
+        await asyncio.shield(
+            self.sys_homeassistant.core.restart(safe_mode=body[ATTR_SAFE_MODE])
+        )
 
     @api_process
-    def rebuild(self, request: web.Request) -> Awaitable[None]:
+    async def rebuild(self, request: web.Request) -> None:
         """Rebuild Home Assistant."""
-        return asyncio.shield(self.sys_homeassistant.core.rebuild())
+        body = await api_validate(SCHEMA_RESTART, request)
+        await self._check_offline_migration(force=body[ATTR_FORCE])
 
-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Home Assistant Docker logs."""
-        return self.sys_homeassistant.core.logs()
+        await asyncio.shield(
+            self.sys_homeassistant.core.rebuild(safe_mode=body[ATTR_SAFE_MODE])
+        )
 
     @api_process
     async def check(self, request: web.Request) -> None:

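Because both new options carry default=False, even an empty restart body validates to a fully populated dict, so the handlers can index body[ATTR_SAFE_MODE] and body[ATTR_FORCE] without guards; a quick sketch of that voluptuous behavior, using placeholder values for the constants:

import voluptuous as vol

ATTR_SAFE_MODE = "safe_mode"  # placeholders for the constants from api/const.py
ATTR_FORCE = "force"

SCHEMA_RESTART = vol.Schema(
    {
        vol.Optional(ATTR_SAFE_MODE, default=False): vol.Boolean(),
        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
    }
)

assert SCHEMA_RESTART({}) == {"safe_mode": False, "force": False}
assert SCHEMA_RESTART({"safe_mode": True}) == {"safe_mode": True, "force": False}
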
@@ -1,9 +1,10 @@
 """Init file for Supervisor host RESTful API."""
+
 import asyncio
 from contextlib import suppress
 import logging
 
-from aiohttp import web
+from aiohttp import ClientConnectionResetError, web
 from aiohttp.hdrs import ACCEPT, RANGE
 import voluptuous as vol
 from voluptuous.error import CoerceInvalid
@@ -27,8 +28,15 @@ from ..const import (
     ATTR_TIMEZONE,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, HostLogError
-from ..host.const import PARAM_BOOT_ID, PARAM_FOLLOW, PARAM_SYSLOG_IDENTIFIER
+from ..exceptions import APIDBMigrationInProgress, APIError, HostLogError
+from ..host.const import (
+    PARAM_BOOT_ID,
+    PARAM_FOLLOW,
+    PARAM_SYSLOG_IDENTIFIER,
+    LogFormat,
+    LogFormatter,
+)
+from ..utils.systemd_journal import journal_logs_reader
 from .const import (
     ATTR_AGENT_VERSION,
     ATTR_APPARMOR_VERSION,
@@ -38,26 +46,48 @@ from .const import (
     ATTR_BROADCAST_MDNS,
     ATTR_DT_SYNCHRONIZED,
     ATTR_DT_UTC,
+    ATTR_FORCE,
     ATTR_IDENTIFIERS,
     ATTR_LLMNR_HOSTNAME,
     ATTR_STARTUP_TIME,
     ATTR_USE_NTP,
+    ATTR_VIRTUALIZATION,
     CONTENT_TYPE_TEXT,
+    CONTENT_TYPE_X_LOG,
 )
-from .utils import api_process, api_validate
+from .utils import api_process, api_process_raw, api_validate
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
 IDENTIFIER = "identifier"
 BOOTID = "bootid"
-DEFAULT_RANGE = 100
+DEFAULT_LINES = 100
 
 SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})
 
+# pylint: disable=no-value-for-parameter
+SCHEMA_SHUTDOWN = vol.Schema(
+    {
+        vol.Optional(ATTR_FORCE, default=False): vol.Boolean(),
+    }
+)
+# pylint: enable=no-value-for-parameter
+
 
 class APIHost(CoreSysAttributes):
     """Handle RESTful API for host functions."""
 
+    async def _check_ha_offline_migration(self, force: bool) -> None:
+        """Check if HA has an offline migration in progress and raise if not forced."""
+        if (
+            not force
+            and (state := await self.sys_homeassistant.api.get_api_state())
+            and state.offline_db_migration
+        ):
+            raise APIDBMigrationInProgress(
+                "Home Assistant offline database migration in progress, please wait until complete before shutting down host"
+            )
+
     @api_process
     async def info(self, request):
         """Return host information."""
@@ -65,6 +95,7 @@ class APIHost(CoreSysAttributes):
             ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
             ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
             ATTR_CHASSIS: self.sys_host.info.chassis,
+            ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
             ATTR_CPE: self.sys_host.info.cpe,
             ATTR_DEPLOYMENT: self.sys_host.info.deployment,
             ATTR_DISK_FREE: self.sys_host.info.free_space,
@@ -98,14 +129,20 @@ class APIHost(CoreSysAttributes):
         )
 
     @api_process
-    def reboot(self, request):
+    async def reboot(self, request):
         """Reboot host."""
-        return asyncio.shield(self.sys_host.control.reboot())
+        body = await api_validate(SCHEMA_SHUTDOWN, request)
+        await self._check_ha_offline_migration(force=body[ATTR_FORCE])
+
+        return await asyncio.shield(self.sys_host.control.reboot())
 
     @api_process
-    def shutdown(self, request):
+    async def shutdown(self, request):
         """Poweroff host."""
-        return asyncio.shield(self.sys_host.control.shutdown())
+        body = await api_validate(SCHEMA_SHUTDOWN, request)
+        await self._check_ha_offline_migration(force=body[ATTR_FORCE])
+
+        return await asyncio.shield(self.sys_host.control.shutdown())
 
     @api_process
     def reload(self, request):
@@ -153,11 +190,11 @@ class APIHost(CoreSysAttributes):
             raise APIError() from err
         return possible_offset
 
-    @api_process
-    async def advanced_logs(
+    async def advanced_logs_handler(
         self, request: web.Request, identifier: str | None = None, follow: bool = False
     ) -> web.StreamResponse:
         """Return systemd-journald logs."""
+        log_formatter = LogFormatter.PLAIN
         params = {}
         if identifier:
             params[PARAM_SYSLOG_IDENTIFIER] = identifier
@@ -165,6 +202,8 @@ class APIHost(CoreSysAttributes):
             params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
         else:
             params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
+            # host logs should be always verbose, no matter what Accept header is used
+            log_formatter = LogFormatter.VERBOSE
 
         if BOOTID in request.match_info:
             params[PARAM_BOOT_ID] = await self._get_boot_id(
@@ -175,28 +214,66 @@ class APIHost(CoreSysAttributes):
 
         if ACCEPT in request.headers and request.headers[ACCEPT] not in [
             CONTENT_TYPE_TEXT,
+            CONTENT_TYPE_X_LOG,
             "*/*",
         ]:
             raise APIError(
-                "Invalid content type requested. Only text/plain supported for now."
+                "Invalid content type requested. Only text/plain and text/x-log "
+                "supported for now."
             )
 
-        if RANGE in request.headers:
+        if "verbose" in request.query or request.headers[ACCEPT] == CONTENT_TYPE_X_LOG:
+            log_formatter = LogFormatter.VERBOSE
+
+        if "lines" in request.query:
+            lines = request.query.get("lines", DEFAULT_LINES)
+            try:
+                lines = int(lines)
+            except ValueError:
+                # If the user passed a non-integer value, just use the default instead of error.
+                lines = DEFAULT_LINES
+            finally:
+                # We can't use the entries= Range header syntax to refer to the last 1 line,
+                # and passing 1 to the calculation below would return the 1st line of the logs
+                # instead. Since this is really an edge case that doesn't matter much, we'll just
+                # return 2 lines at minimum.
+                lines = max(2, lines)
+            # entries=cursor[[:num_skip]:num_entries]
+            range_header = f"entries=:-{lines - 1}:{'' if follow else lines}"
+        elif RANGE in request.headers:
             range_header = request.headers.get(RANGE)
         else:
-            range_header = f"entries=:-{DEFAULT_RANGE}:"
+            range_header = (
+                f"entries=:-{DEFAULT_LINES - 1}:{'' if follow else DEFAULT_LINES}"
+            )
 
         async with self.sys_host.logs.journald_logs(
-            params=params, range_header=range_header
+            params=params, range_header=range_header, accept=LogFormat.JOURNAL
         ) as resp:
             try:
                 response = web.StreamResponse()
                 response.content_type = CONTENT_TYPE_TEXT
-                await response.prepare(request)
-                async for data in resp.content:
-                    await response.write(data)
+                headers_returned = False
+                async for cursor, line in journal_logs_reader(resp, log_formatter):
+                    if not headers_returned:
+                        if cursor:
+                            response.headers["X-First-Cursor"] = cursor
+                        response.headers["X-Accel-Buffering"] = "no"
+                        await response.prepare(request)
+                        headers_returned = True
+                    # When client closes the connection while reading busy logs, we
+                    # sometimes get this exception. It should be safe to ignore it.
+                    with suppress(ClientConnectionResetError):
+                        await response.write(line.encode("utf-8") + b"\n")
             except ConnectionResetError as ex:
                 raise APIError(
                     "Connection reset when trying to fetch data from systemd-journald."
                 ) from ex
             return response
+
+    @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
+    async def advanced_logs(
+        self, request: web.Request, identifier: str | None = None, follow: bool = False
+    ) -> web.StreamResponse:
+        """Return systemd-journald logs. Wrapped as standard API handler."""
+        return await self.advanced_logs_handler(request, identifier, follow)

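The entries= Range header assembled above follows systemd-journal-gatewayd's entries=cursor[[:num_skip]:num_entries] form: an empty cursor plus a negative skip rewinds from the end of the journal, and the entry count is left empty while following so the stream stays open. A small worked sketch of the construction, mirroring the handler's arithmetic:

DEFAULT_LINES = 100

def journald_range(lines: int = DEFAULT_LINES, follow: bool = False) -> str:
    """Build an entries= Range header selecting the last `lines` journal entries."""
    lines = max(2, lines)  # mirror the handler's 2-line minimum for the edge case
    # Skip back lines-1 entries from the end, then read `lines` entries
    # (or leave the count empty so a follow stream stays unbounded).
    return f"entries=:-{lines - 1}:{'' if follow else lines}"

assert journald_range(100) == "entries=:-99:100"
assert journald_range(10, follow=True) == "entries=:-9:"
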
@@ -1,4 +1,5 @@
 """Supervisor Add-on ingress service."""
+
 import asyncio
 from ipaddress import ip_address
 import logging
@@ -276,6 +277,7 @@ class APIIngress(CoreSysAttributes):
         response.content_type = content_type
 
         try:
+            response.headers["X-Accel-Buffering"] = "no"
             await response.prepare(request)
             async for data in result.content.iter_chunked(4096):
                 await response.write(data)

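The one-line ingress change matters for ordering: aiohttp sends the header block when prepare() runs, so X-Accel-Buffering: no (a hint telling an nginx-style proxy not to buffer the streamed body) must be set first. A minimal handler showing that ordering, with hypothetical route wiring:

from aiohttp import web

async def stream(request: web.Request) -> web.StreamResponse:
    response = web.StreamResponse()
    response.headers["X-Accel-Buffering"] = "no"  # must be set before prepare()
    await response.prepare(request)               # headers go on the wire here
    for chunk in (b"one\n", b"two\n"):
        await response.write(chunk)
    await response.write_eof()
    return response

app = web.Application()
app.router.add_get("/stream", stream)  # hypothetical route, for illustration only
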
@@ -1,4 +1,5 @@
 """Init file for Supervisor Jobs RESTful API."""
+
 import logging
 from typing import Any
 
@@ -6,6 +7,7 @@ from aiohttp import web
 import voluptuous as vol
 
 from ..coresys import CoreSysAttributes
+from ..exceptions import APIError, APINotFound, JobNotFound
 from ..jobs import SupervisorJob
 from ..jobs.const import ATTR_IGNORE_CONDITIONS, JobCondition
 from .const import ATTR_JOBS
@@ -21,10 +23,24 @@ SCHEMA_OPTIONS = vol.Schema(
 class APIJobs(CoreSysAttributes):
     """Handle RESTful API for OS functions."""
 
-    def _list_jobs(self) -> list[dict[str, Any]]:
-        """Return current job tree."""
+    def _extract_job(self, request: web.Request) -> SupervisorJob:
+        """Extract job from request or raise."""
+        try:
+            return self.sys_jobs.get_job(request.match_info.get("uuid"))
+        except JobNotFound:
+            raise APINotFound("Job does not exist") from None
+
+    def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
+        """Return current job tree.
+
+        Jobs are added to cache as they are created so by default they are in oldest to newest.
+        This is correct ordering for child jobs as it makes logical sense to present those in
+        the order they occurred within the parent. For the list as a whole, sort from newest
+        to oldest as its likely any client is most interested in the newer ones.
+        """
+        # Initially sort oldest to newest so all child lists end up in correct order
         jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
-        for job in self.sys_jobs.jobs:
+        for job in sorted(self.sys_jobs.jobs):
             if job.internal:
                 continue
 
@@ -33,10 +49,16 @@ class APIJobs(CoreSysAttributes):
             else:
                 jobs_by_parent[job.parent_id].append(job)
 
+        # After parent-child organization, sort the root jobs only from newest to oldest
         job_list: list[dict[str, Any]] = []
-        queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = [
-            (job_list, job) for job in jobs_by_parent.get(None, [])
-        ]
+        queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
+            [(job_list, start)]
+            if start
+            else [
+                (job_list, job)
+                for job in sorted(jobs_by_parent.get(None, []), reverse=True)
+            ]
+        )
 
         while queue:
             (current_list, current_job) = queue.pop(0)
@@ -78,3 +100,19 @@ class APIJobs(CoreSysAttributes):
     async def reset(self, request: web.Request) -> None:
         """Reset options for JobManager."""
         self.sys_jobs.reset_data()
+
+    @api_process
+    async def job_info(self, request: web.Request) -> dict[str, Any]:
+        """Get details of a job by ID."""
+        job = self._extract_job(request)
+        return self._list_jobs(job)[0]
+
+    @api_process
+    async def remove_job(self, request: web.Request) -> None:
+        """Remove a completed job."""
+        job = self._extract_job(request)
+
+        if not job.done:
+            raise APIError(f"Job {job.uuid} is not done!")
+
+        self.sys_jobs.remove_job(job)

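The ordering in _list_jobs is a two-pass trick: group children oldest-first so each parent's list reads in occurrence order, then seed the breadth-first queue with root jobs newest-first. A self-contained sketch of the same idea with a stand-in job type (the real SupervisorJob defines its own sort order and payload):

from dataclasses import dataclass, field

@dataclass(order=True)
class Job:
    created: int                      # stand-in sort key; real jobs sort by creation
    name: str = field(compare=False)
    parent_id: str | None = field(compare=False, default=None)
    uuid: str = field(compare=False, default="")

def list_tree(jobs: list[Job]) -> list[dict]:
    # Pass 1: oldest to newest, so every child list ends up in occurrence order
    by_parent: dict[str | None, list[Job]] = {}
    for job in sorted(jobs):
        by_parent.setdefault(job.parent_id, []).append(job)

    # Pass 2: roots newest-first; children keep their oldest-first order
    out: list[dict] = []
    queue = [(out, job) for job in sorted(by_parent.get(None, []), reverse=True)]
    while queue:
        target, job = queue.pop(0)
        entry = {"name": job.name, "child_jobs": []}
        target.append(entry)
        queue.extend((entry["child_jobs"], c) for c in by_parent.get(job.uuid, []))
    return out
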
@@ -1,4 +1,5 @@
 """Handle security part of this API."""
+
 import logging
 import re
 from typing import Final
@@ -8,6 +9,8 @@ from aiohttp.web import Request, RequestHandler, Response, middleware
 from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
 from awesomeversion import AwesomeVersion

+from supervisor.homeassistant.const import LANDINGPAGE
+
 from ...addons.const import RE_SLUG
 from ...const import (
     REQUEST_FROM,
@@ -77,6 +80,13 @@ ADDONS_API_BYPASS: Final = re.compile(
     r")$"
 )

+# Home Assistant only
+CORE_ONLY_PATHS: Final = re.compile(
+    r"^(?:"
+    r"/addons/" + RE_SLUG + "/sys_options"
+    r")$"
+)
+
 # Policy role add-on API access
 ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
     ROLE_DEFAULT: re.compile(
@@ -103,6 +113,8 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
         r"|/addons(?:/" + RE_SLUG + r"/(?!security).+|/reload)?"
         r"|/audio/.+"
         r"|/auth/cache"
+        r"|/available_updates"
+        r"|/backups.*"
         r"|/cli/.+"
         r"|/core/.+"
         r"|/dns/.+"
@@ -112,16 +124,17 @@ ADDONS_ROLE_ACCESS: dict[str, re.Pattern] = {
         r"|/hassos/.+"
         r"|/homeassistant/.+"
         r"|/host/.+"
+        r"|/mounts.*"
         r"|/multicast/.+"
         r"|/network/.+"
         r"|/observer/.+"
-        r"|/os/.+"
+        r"|/os/(?!datadisk/wipe).+"
+        r"|/refresh_updates"
         r"|/resolution/.+"
-        r"|/backups.*"
+        r"|/security/.+"
         r"|/snapshots.*"
         r"|/store.*"
         r"|/supervisor/.+"
-        r"|/security/.+"
         r")$"
     ),
     ROLE_ADMIN: re.compile(
@@ -229,6 +242,9 @@ class SecurityMiddleware(CoreSysAttributes):
         if supervisor_token == self.sys_homeassistant.supervisor_token:
             _LOGGER.debug("%s access from Home Assistant", request.path)
             request_from = self.sys_homeassistant
+        elif CORE_ONLY_PATHS.match(request.path):
+            _LOGGER.warning("Attempted access to %s from client besides Home Assistant", request.path)
+            raise HTTPForbidden()

         # Host
         if supervisor_token == self.sys_plugins.cli.supervisor_token:
@@ -274,8 +290,10 @@ class SecurityMiddleware(CoreSysAttributes):
     @middleware
     async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
         """Validate user from Core API proxy."""
-        if request[REQUEST_FROM] != self.sys_homeassistant or version_is_new_enough(
-            self.sys_homeassistant.version, _CORE_VERSION
+        if (
+            request[REQUEST_FROM] != self.sys_homeassistant
+            or self.sys_homeassistant.version == LANDINGPAGE
+            or version_is_new_enough(self.sys_homeassistant.version, _CORE_VERSION)
         ):
             return await handler(request)
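The CORE_ONLY_PATHS gate added above can be exercised on its own. A minimal sketch, assuming only the standard re module and aiohttp's HTTPForbidden, with RE_SLUG simplified to a hypothetical slug pattern rather than the real one from addons.const:

import re
from aiohttp.web_exceptions import HTTPForbidden

RE_SLUG = r"[-_.A-Za-z0-9]+"  # simplified stand-in for the real RE_SLUG
CORE_ONLY_PATHS = re.compile(r"^(?:" r"/addons/" + RE_SLUG + "/sys_options" r")$")

def enforce_core_only(path: str, request_from_core: bool) -> None:
    """Reject core-only endpoints for any caller other than Home Assistant."""
    if not request_from_core and CORE_ONLY_PATHS.match(path):
        raise HTTPForbidden()

enforce_core_only("/addons/core_ssh/sys_options", request_from_core=True)  # passes
# enforce_core_only("/addons/core_ssh/sys_options", request_from_core=False)  # raises 403

Note the related ADDONS_ROLE_ACCESS change: /os/.+ becomes /os/(?!datadisk/wipe).+, so the new data-disk wipe endpoint is carved out of the default role with a negative lookahead rather than a separate deny list.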
@@ -7,11 +7,11 @@ import voluptuous as vol

 from ..const import ATTR_NAME, ATTR_STATE
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError
+from ..exceptions import APIError, APINotFound
 from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
 from ..mounts.mount import Mount
 from ..mounts.validate import SCHEMA_MOUNT_CONFIG
-from .const import ATTR_MOUNTS
+from .const import ATTR_MOUNTS, ATTR_USER_PATH
 from .utils import api_process, api_validate

 SCHEMA_OPTIONS = vol.Schema(
@@ -24,6 +24,13 @@ SCHEMA_OPTIONS = vol.Schema(
 class APIMounts(CoreSysAttributes):
     """Handle REST API for mounting options."""

+    def _extract_mount(self, request: web.Request) -> Mount:
+        """Extract mount from request or raise."""
+        name = request.match_info.get("mount")
+        if name not in self.sys_mounts:
+            raise APINotFound(f"No mount exists with name {name}")
+        return self.sys_mounts.get(name)
+
     @api_process
     async def info(self, request: web.Request) -> dict[str, Any]:
         """Return MountManager info."""
@@ -32,7 +39,13 @@ class APIMounts(CoreSysAttributes):
             if self.sys_mounts.default_backup_mount
             else None,
             ATTR_MOUNTS: [
-                mount.to_dict() | {ATTR_STATE: mount.state}
+                mount.to_dict()
+                | {
+                    ATTR_STATE: mount.state,
+                    ATTR_USER_PATH: mount.container_where.as_posix()
+                    if mount.container_where
+                    else None,
+                }
                 for mount in self.sys_mounts.mounts
             ],
         }
@@ -79,15 +92,13 @@ class APIMounts(CoreSysAttributes):
     @api_process
     async def update_mount(self, request: web.Request) -> None:
         """Update an existing mount in supervisor."""
-        name = request.match_info.get("mount")
+        current = self._extract_mount(request)
         name_schema = vol.Schema(
-            {vol.Optional(ATTR_NAME, default=name): name}, extra=vol.ALLOW_EXTRA
+            {vol.Optional(ATTR_NAME, default=current.name): current.name},
+            extra=vol.ALLOW_EXTRA,
         )
         body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)

-        if name not in self.sys_mounts:
-            raise APIError(f"No mount exists with name {name}")
-
         mount = Mount.from_dict(self.coresys, body)
         await self.sys_mounts.create_mount(mount)

@@ -104,8 +115,8 @@ class APIMounts(CoreSysAttributes):
     @api_process
     async def delete_mount(self, request: web.Request) -> None:
         """Delete an existing mount in supervisor."""
-        name = request.match_info.get("mount")
-        mount = await self.sys_mounts.remove_mount(name)
+        current = self._extract_mount(request)
+        mount = await self.sys_mounts.remove_mount(current.name)

         # If it was a backup mount, reload backups
         if mount.usage == MountUsage.BACKUP:
@@ -116,9 +127,9 @@ class APIMounts(CoreSysAttributes):
     @api_process
     async def reload_mount(self, request: web.Request) -> None:
         """Reload an existing mount in supervisor."""
-        name = request.match_info.get("mount")
-        await self.sys_mounts.reload_mount(name)
+        mount = self._extract_mount(request)
+        await self.sys_mounts.reload_mount(mount.name)

         # If it's a backup mount, reload backups
-        if self.sys_mounts.get(name).usage == MountUsage.BACKUP:
+        if mount.usage == MountUsage.BACKUP:
             self.sys_create_task(self.sys_backups.reload())
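The new _extract_mount helper centralizes the lookup-or-404 handling that update_mount, delete_mount, and reload_mount previously each open-coded. A minimal sketch of the same pattern, with a plain dict standing in for sys_mounts and a local exception standing in for supervisor's APINotFound (both assumptions for illustration):

class APINotFound(Exception):
    """Stand-in for supervisor's APINotFound (a 404-style API error)."""

MOUNTS: dict[str, dict] = {"backup_share": {"usage": "backup"}}

def extract_mount(match_info: dict[str, str]) -> dict:
    """Return the mount named in the URL, or raise a 404-style error."""
    name = match_info.get("mount")
    if name not in MOUNTS:
        raise APINotFound(f"No mount exists with name {name}")
    return MOUNTS[name]

mount = extract_mount({"mount": "backup_share"})  # returns the mount entry
# extract_mount({"mount": "missing"})  # raises APINotFound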
@@ -1,4 +1,5 @@
 """Init file for Supervisor Multicast RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 import logging
@@ -23,8 +24,7 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError
 from ..validate import version_tag
-from .const import CONTENT_TYPE_BINARY
-from .utils import api_process, api_process_raw, api_validate
+from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -69,11 +69,6 @@ class APIMulticast(CoreSysAttributes):
             raise APIError(f"Version {version} is already in use")
         await asyncio.shield(self.sys_plugins.multicast.update(version))

-    @api_process_raw(CONTENT_TYPE_BINARY)
-    def logs(self, request: web.Request) -> Awaitable[bytes]:
-        """Return Multicast Docker logs."""
-        return self.sys_plugins.multicast.logs()
-
     @api_process
     def restart(self, request: web.Request) -> Awaitable[None]:
         """Restart Multicast plugin."""
@@ -1,8 +1,8 @@
 """REST API for network."""

 import asyncio
 from collections.abc import Awaitable
-from dataclasses import replace
-from ipaddress import ip_address, ip_interface
+from ipaddress import IPv4Address, IPv4Interface, IPv6Address, IPv6Interface
 from typing import Any

 from aiohttp import web
@@ -42,24 +42,34 @@ from ..const import (
     DOCKER_NETWORK_MASK,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import APIError, HostNetworkNotFound
+from ..exceptions import APIError, APINotFound, HostNetworkNotFound
 from ..host.configuration import (
     AccessPoint,
     Interface,
     InterfaceMethod,
     IpConfig,
+    IpSetting,
     VlanConfig,
     WifiConfig,
 )
 from ..host.const import AuthMethod, InterfaceType, WifiMode
 from .utils import api_process, api_validate

-_SCHEMA_IP_CONFIG = vol.Schema(
+_SCHEMA_IPV4_CONFIG = vol.Schema(
     {
-        vol.Optional(ATTR_ADDRESS): [vol.Coerce(ip_interface)],
+        vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv4Interface)],
         vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
-        vol.Optional(ATTR_GATEWAY): vol.Coerce(ip_address),
-        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(ip_address)],
+        vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv4Address),
+        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv4Address)],
+    }
+)
+
+_SCHEMA_IPV6_CONFIG = vol.Schema(
+    {
+        vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)],
+        vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
+        vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address),
+        vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)],
     }
 )

@@ -76,18 +86,18 @@ _SCHEMA_WIFI_CONFIG = vol.Schema(
 # pylint: disable=no-value-for-parameter
 SCHEMA_UPDATE = vol.Schema(
     {
-        vol.Optional(ATTR_IPV4): _SCHEMA_IP_CONFIG,
-        vol.Optional(ATTR_IPV6): _SCHEMA_IP_CONFIG,
+        vol.Optional(ATTR_IPV4): _SCHEMA_IPV4_CONFIG,
+        vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG,
         vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
         vol.Optional(ATTR_ENABLED): vol.Boolean(),
     }
 )


-def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
+def ipconfig_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]:
     """Return a dict with information about ip configuration."""
     return {
-        ATTR_METHOD: config.method,
+        ATTR_METHOD: setting.method,
         ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
         ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
         ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
@@ -122,8 +132,8 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
         ATTR_CONNECTED: interface.connected,
         ATTR_PRIMARY: interface.primary,
         ATTR_MAC: interface.mac,
-        ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
-        ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
+        ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting),
+        ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting),
         ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
         ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
     }
@@ -157,7 +167,7 @@ class APINetwork(CoreSysAttributes):
         except HostNetworkNotFound:
             pass

-        raise APIError(f"Interface {name} does not exist") from None
+        raise APINotFound(f"Interface {name} does not exist") from None

     @api_process
     async def info(self, request: web.Request) -> dict[str, Any]:
@@ -197,24 +207,26 @@ class APINetwork(CoreSysAttributes):
         # Apply config
         for key, config in body.items():
             if key == ATTR_IPV4:
-                interface.ipv4 = replace(
-                    interface.ipv4
-                    or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
-                    **config,
+                interface.ipv4setting = IpSetting(
+                    config.get(ATTR_METHOD, InterfaceMethod.STATIC),
+                    config.get(ATTR_ADDRESS, []),
+                    config.get(ATTR_GATEWAY),
+                    config.get(ATTR_NAMESERVERS, []),
                 )
             elif key == ATTR_IPV6:
-                interface.ipv6 = replace(
-                    interface.ipv6
-                    or IpConfig(InterfaceMethod.STATIC, [], None, [], None),
-                    **config,
+                interface.ipv6setting = IpSetting(
+                    config.get(ATTR_METHOD, InterfaceMethod.STATIC),
+                    config.get(ATTR_ADDRESS, []),
+                    config.get(ATTR_GATEWAY),
+                    config.get(ATTR_NAMESERVERS, []),
                 )
             elif key == ATTR_WIFI:
-                interface.wifi = replace(
-                    interface.wifi
-                    or WifiConfig(
-                        WifiMode.INFRASTRUCTURE, "", AuthMethod.OPEN, None, None
-                    ),
-                    **config,
+                interface.wifi = WifiConfig(
+                    config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE),
+                    config.get(ATTR_SSID, ""),
+                    config.get(ATTR_AUTH, AuthMethod.OPEN),
+                    config.get(ATTR_PSK, None),
+                    None,
                 )
             elif key == ATTR_ENABLED:
                 interface.enabled = config
@@ -256,24 +268,22 @@ class APINetwork(CoreSysAttributes):

         vlan_config = VlanConfig(vlan, interface.name)

-        ipv4_config = None
+        ipv4_setting = None
         if ATTR_IPV4 in body:
-            ipv4_config = IpConfig(
+            ipv4_setting = IpSetting(
                 body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
                 body[ATTR_IPV4].get(ATTR_ADDRESS, []),
                 body[ATTR_IPV4].get(ATTR_GATEWAY, None),
                 body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
-                None,
             )

-        ipv6_config = None
+        ipv6_setting = None
         if ATTR_IPV6 in body:
-            ipv6_config = IpConfig(
+            ipv6_setting = IpSetting(
                 body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
                 body[ATTR_IPV6].get(ATTR_ADDRESS, []),
                 body[ATTR_IPV6].get(ATTR_GATEWAY, None),
                 body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
-                None,
             )

         vlan_interface = Interface(
@@ -284,8 +294,10 @@ class APINetwork(CoreSysAttributes):
             True,
             False,
             InterfaceType.VLAN,
-            ipv4_config,
-            ipv6_config,
+            None,
+            ipv4_setting,
+            None,
+            ipv6_setting,
             None,
             vlan_config,
         )
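Splitting the shared _SCHEMA_IP_CONFIG into per-family schemas means voluptuous now rejects an address from the wrong family instead of coercing anything ip_interface accepts. A minimal sketch of the IPv4 half, assuming voluptuous is installed and using plain string keys in place of the ATTR_* constants:

from ipaddress import IPv4Address, IPv4Interface

import voluptuous as vol

SCHEMA_IPV4 = vol.Schema(
    {
        # vol.Coerce wraps the ValueError from IPv4Interface/IPv4Address into vol.Invalid
        vol.Optional("address"): [vol.Coerce(IPv4Interface)],
        vol.Optional("gateway"): vol.Coerce(IPv4Address),
    }
)

print(SCHEMA_IPV4({"address": ["192.168.1.5/24"], "gateway": "192.168.1.1"}))
# An IPv6 value such as {"gateway": "fe80::1"} now raises vol.Invalid.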
@@ -1,4 +1,5 @@
 """Init file for Supervisor Observer RESTful API."""
+
 import asyncio
 import logging
 from typing import Any
@@ -1,4 +1,5 @@
 """Init file for Supervisor HassOS RESTful API."""
+
 import asyncio
 from collections.abc import Awaitable
 import logging
@@ -19,6 +20,7 @@ from ..const import (
     ATTR_POWER_LED,
     ATTR_SERIAL,
     ATTR_SIZE,
+    ATTR_STATE,
     ATTR_UPDATE_AVAILABLE,
     ATTR_VERSION,
     ATTR_VERSION_LATEST,
@@ -28,13 +30,17 @@ from ..exceptions import BoardInvalidError
 from ..resolution.const import ContextType, IssueType, SuggestionType
 from ..validate import version_tag
 from .const import (
+    ATTR_BOOT_SLOT,
+    ATTR_BOOT_SLOTS,
     ATTR_DATA_DISK,
     ATTR_DEV_PATH,
     ATTR_DEVICE,
     ATTR_DISKS,
     ATTR_MODEL,
+    ATTR_STATUS,
     ATTR_SYSTEM_HEALTH_LED,
     ATTR_VENDOR,
+    BootSlot,
 )
 from .utils import api_process, api_validate

@@ -42,6 +48,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)

 # pylint: disable=no-value-for-parameter
 SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
+SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required(ATTR_BOOT_SLOT): vol.Coerce(BootSlot)})
 SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): str})

 SCHEMA_YELLOW_OPTIONS = vol.Schema(
@@ -74,6 +81,15 @@ class APIOS(CoreSysAttributes):
             ATTR_BOARD: self.sys_os.board,
             ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
             ATTR_DATA_DISK: self.sys_os.datadisk.disk_used_id,
+            ATTR_BOOT_SLOTS: {
+                slot.bootname: {
+                    ATTR_STATE: slot.state,
+                    ATTR_STATUS: slot.boot_status,
+                    ATTR_VERSION: slot.bundle_version,
+                }
+                for slot in self.sys_os.slots
+                if slot.bootname
+            },
         }

     @api_process
@@ -96,6 +112,17 @@ class APIOS(CoreSysAttributes):

         await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))

+    @api_process
+    def wipe_data(self, request: web.Request) -> Awaitable[None]:
+        """Trigger data disk wipe on Host."""
+        return asyncio.shield(self.sys_os.datadisk.wipe_disk())
+
+    @api_process
+    async def set_boot_slot(self, request: web.Request) -> None:
+        """Change the active boot slot and reboot into it."""
+        body = await api_validate(SCHEMA_SET_BOOT_SLOT, request)
+        await asyncio.shield(self.sys_os.set_boot_slot(body[ATTR_BOOT_SLOT]))
+
     @api_process
     async def list_data(self, request: web.Request) -> dict[str, Any]:
         """Return possible data targets."""
@@ -130,13 +157,17 @@ class APIOS(CoreSysAttributes):
         body = await api_validate(SCHEMA_GREEN_OPTIONS, request)

         if ATTR_ACTIVITY_LED in body:
-            self.sys_dbus.agent.board.green.activity_led = body[ATTR_ACTIVITY_LED]
+            await self.sys_dbus.agent.board.green.set_activity_led(
+                body[ATTR_ACTIVITY_LED]
+            )

         if ATTR_POWER_LED in body:
-            self.sys_dbus.agent.board.green.power_led = body[ATTR_POWER_LED]
+            await self.sys_dbus.agent.board.green.set_power_led(body[ATTR_POWER_LED])

         if ATTR_SYSTEM_HEALTH_LED in body:
-            self.sys_dbus.agent.board.green.user_led = body[ATTR_SYSTEM_HEALTH_LED]
+            await self.sys_dbus.agent.board.green.set_user_led(
+                body[ATTR_SYSTEM_HEALTH_LED]
+            )

         self.sys_dbus.agent.board.green.save_data()

@@ -155,13 +186,15 @@ class APIOS(CoreSysAttributes):
         body = await api_validate(SCHEMA_YELLOW_OPTIONS, request)

         if ATTR_DISK_LED in body:
-            self.sys_dbus.agent.board.yellow.disk_led = body[ATTR_DISK_LED]
+            await self.sys_dbus.agent.board.yellow.set_disk_led(body[ATTR_DISK_LED])

         if ATTR_HEARTBEAT_LED in body:
-            self.sys_dbus.agent.board.yellow.heartbeat_led = body[ATTR_HEARTBEAT_LED]
+            await self.sys_dbus.agent.board.yellow.set_heartbeat_led(
+                body[ATTR_HEARTBEAT_LED]
+            )

         if ATTR_POWER_LED in body:
-            self.sys_dbus.agent.board.yellow.power_led = body[ATTR_POWER_LED]
+            await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])

         self.sys_dbus.agent.board.yellow.save_data()
         self.sys_resolution.create_issue(
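The new set_boot_slot endpoint validates its payload by coercing into an enum, so any slot name outside the known set fails before reaching the OS layer. A minimal sketch, assuming voluptuous and a hypothetical two-slot BootSlot enum mirroring the one imported from .const:

from enum import Enum

import voluptuous as vol

class BootSlot(str, Enum):
    """Hypothetical stand-in for supervisor's BootSlot const."""
    A = "A"
    B = "B"

SCHEMA_SET_BOOT_SLOT = vol.Schema({vol.Required("boot_slot"): vol.Coerce(BootSlot)})

body = SCHEMA_SET_BOOT_SLOT({"boot_slot": "A"})
print(body["boot_slot"])  # BootSlot.A
# A payload like {"boot_slot": "C"} raises vol.Invalid instead of reaching set_boot_slot.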
@@ -1 +1 @@
-!function(){function n(n){var t=document.createElement("script");t.src=n,document.body.appendChild(t)}if(/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent))n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js");else try{new Function("import('/api/hassio/app/frontend_latest/entrypoint-qzB1D0O4L9U.js')")()}catch(t){n("/api/hassio/app/frontend_es5/entrypoint-5yRSddAJzJ4.js")}}()
+!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(12[2-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(12[3-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(109|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(17\.([4-9]|\d{2,})|(1[89]|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(10[7-9]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(15[._]([6-9]|\d{2,})|(1[6-9]|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](12[2-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(12[3-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(12[2-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[4-9]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[2-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.73ec900e351835f9.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.163d6939af79fd9b.js")}else d("/api/hassio/app/frontend_es5/entrypoint.163d6939af79fd9b.js")}()
4
supervisor/api/panel/entrypoint.js.br
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([[1047],{32594:function(e,t,r){r.d(t,{U:function(){return n}});var n=function(e){return e.stopPropagation()}},75054:function(e,t,r){r.r(t),r.d(t,{HaTimeDuration:function(){return f}});var n,a=r(88962),i=r(33368),o=r(71650),d=r(82390),u=r(69205),l=r(70906),s=r(91808),c=r(68144),v=r(79932),f=(r(47289),(0,s.Z)([(0,v.Mo)("ha-selector-duration")],(function(e,t){var r=function(t){(0,u.Z)(n,t);var r=(0,l.Z)(n);function n(){var t;(0,o.Z)(this,n);for(var a=arguments.length,i=new Array(a),u=0;u<a;u++)i[u]=arguments[u];return t=r.call.apply(r,[this].concat(i)),e((0,d.Z)(t)),t}return(0,i.Z)(n)}(t);return{F:r,d:[{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,v.Cb)({attribute:!1})],key:"value",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,v.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,v.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){var e;return(0,c.dy)(n||(n=(0,a.Z)([' <ha-duration-input .label="','" .helper="','" .data="','" .disabled="','" .required="','" ?enableDay="','"></ha-duration-input> '])),this.label,this.helper,this.value,this.disabled,this.required,null===(e=this.selector.duration)||void 0===e?void 0:e.enable_day)}}]}}),c.oi))}}]);
//# sourceMappingURL=1047-g7fFLS9eP4I.js.map
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"file":"1047-g7fFLS9eP4I.js","mappings":"yKAAO,IAAMA,EAAkB,SAACC,GAAE,OAAKA,EAAGD,iBAAiB,C,qLCQ9CE,G,UAAcC,EAAAA,EAAAA,GAAA,EAD1BC,EAAAA,EAAAA,IAAc,0BAAuB,SAAAC,EAAAC,GAAA,IACzBJ,EAAc,SAAAK,IAAAC,EAAAA,EAAAA,GAAAN,EAAAK,GAAA,IAAAE,GAAAC,EAAAA,EAAAA,GAAAR,GAAA,SAAAA,IAAA,IAAAS,GAAAC,EAAAA,EAAAA,GAAA,KAAAV,GAAA,QAAAW,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,EAAAF,EAAAU,KAAAC,MAAAX,EAAA,OAAAY,OAAAL,IAAAX,GAAAiB,EAAAA,EAAAA,GAAAX,IAAAA,CAAA,QAAAY,EAAAA,EAAAA,GAAArB,EAAA,EAAAI,GAAA,OAAAkB,EAAdtB,EAAcuB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EACxBC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAG,EACjB,OAAOC,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,wIAEEC,KAAKC,MACJD,KAAKE,OACPF,KAAKP,MACDO,KAAKG,SACLH,KAAKI,SACkB,QADVR,EACZI,KAAKK,SAASC,gBAAQ,IAAAV,OAAA,EAAtBA,EAAwBW,WAG3C,IAAC,GA1BiCC,EAAAA,I","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/common/dom/stop_propagation.ts","https://raw.githubusercontent.com/home-assistant/frontend/20230703.0/src/components/ha-selector/ha-selector-duration.ts"],"names":["stopPropagation","ev","HaTimeDuration","_decorate","customElement","_initialize","_LitElement","_LitElement2","_inherits","_super","_createSuper","_this","_classCallCheck","_len","arguments","length","args","Array","_key","call","apply","concat","_assertThisInitialized","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","_this$selector$durati","html","_templateObject","_taggedTemplateLiteral","this","label","helper","disabled","required","selector","duration","enable_day","LitElement"],"sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1081.e647cbe586ff9dd0.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1081.e647cbe586ff9dd0.js.gz
Normal file
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"1081.e647cbe586ff9dd0.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250205.0/src/components/ha-button-toggle-group.ts","https://raw.githubusercontent.com/home-assistant/frontend/20250205.0/src/components/ha-selector/ha-selector-button-toggle.ts"],"names":["_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","queryAll","html","_t","_","this","buttons","map","button","iconPath","_t2","label","active","_handleClick","_t3","styleMap","width","fullWidth","length","dense","_this$_buttons","_buttons","forEach","async","updateComplete","shadowRoot","querySelector","style","margin","ev","currentTarget","fireEvent","static","css","_t4","LitElement","HaButtonToggleSelector","_this$selector$button","_this$selector$button2","_this$selector$button3","options","selector","button_toggle","option","translationKey","translation_key","localizeValue","localizedLabel","sort","a","b","caseInsensitiveStringCompare","hass","locale","language","toggleButtons","item","_valueChanged","_ev$detail","_this$value","stopPropagation","detail","target","disabled","undefined"],"mappings":"sXAWgCA,EAAAA,EAAAA,GAAA,EAD/BC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAkIvC,OAAAC,EAlID,cACgCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC7BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,UAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,UAAW,aAAcG,KAAMC,WAAUH,IAAA,YAAAC,KAAAA,GAAA,OAClC,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEvBC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,QAAAC,KAAAA,GAAA,OAAgB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEhDO,EAAAA,EAAAA,IAAS,eAAaJ,IAAA,WAAAC,WAAA,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEvB,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,uBAELC,KAAKC,QAAQC,KAAKC,GAClBA,EAAOC,UACHP,EAAAA,EAAAA,IAAIQ,IAAAA,EAAAN,CAAA,2GACOI,EAAOG,MACRH,EAAOC,SACND,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,eAEhBX,EAAAA,EAAAA,IAAIY,IAAAA,EAAAV,CAAA,iHACMW,EAAAA,EAAAA,GAAS,CACfC,MAAOX,KAAKY,UACL,IAAMZ,KAAKC,QAAQY,OAAtB,IACA,YAGGb,KAAKc,MACLX,EAAOV,MACNO,KAAKO,SAAWJ,EAAOV,MACxBO,KAAKQ,aACXL,EAAOG,SAKxB,GAAC,CAAAlB,KAAA,SAAAI,IAAA,UAAAC,MAED,WAAoB,IAAAsB,EAEL,QAAbA,EAAAf,KAAKgB,gBAAQ,IAAAD,GAAbA,EAAeE,SAAQC,gBACff,EAAOgB,eAEXhB,EAAOiB,WAAYC,cAAc,UACjCC,MAAMC,OAAS,GAAG,GAExB,GAAC,CAAAnC,KAAA,SAAAI,IAAA,eAAAC,MAED,SAAqB+B,GACnBxB,KAAKO,OAASiB,EAAGC,cAAchC,OAC/BiC,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAAEP,MAAOO,KAAKO,QACjD,GAAC,CAAAnB,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGC,IAAAA,EAAA9B,CAAA,u0CAzDoB+B,EAAAA,I,MCD5BC,GAAsBnD,EAAAA,EAAAA,GAAA,EADlCC,EAAAA,EAAAA,IAAc,+BAA4B,SAAAC,EAAAC,GA4F1C,OAAAC,EA5FD,cACmCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAChCC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,gBAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAG9BC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAEnDC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WAAmB,IAAAuC,EAAAC,EAAAC,EACjB,MAAMC,GACuB,QAA3BH,EAAAhC,KAAKoC,SAASC,qBAAa,IAAAL,GAAS,QAATA
,EAA3BA,EAA6BG,eAAO,IAAAH,OAAA,EAApCA,EAAsC9B,KAAKoC,GACvB,iBAAXA,EACFA,EACA,CAAE7C,MAAO6C,EAAQhC,MAAOgC,OAC1B,GAEDC,EAA4C,QAA9BN,EAAGjC,KAAKoC,SAASC,qBAAa,IAAAJ,OAAA,EAA3BA,EAA6BO,gBAEhDxC,KAAKyC,eAAiBF,GACxBJ,EAAQlB,SAASqB,IACf,MAAMI,EAAiB1C,KAAKyC,cAC1B,GAAGF,aAA0BD,EAAO7C,SAElCiD,IACFJ,EAAOhC,MAAQoC,EACjB,IAI2B,QAA/BR,EAAIlC,KAAKoC,SAASC,qBAAa,IAAAH,GAA3BA,EAA6BS,MAC/BR,EAAQQ,MAAK,CAACC,EAAGC,KACfC,EAAAA,EAAAA,GACEF,EAAEtC,MACFuC,EAAEvC,MACFN,KAAK+C,KAAKC,OAAOC,YAKvB,MAAMC,EAAgCf,EAAQjC,KAAKiD,IAAkB,CACnE7C,MAAO6C,EAAK7C,MACZb,MAAO0D,EAAK1D,UAGd,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,iHACPC,KAAKM,MAEM4C,EACDlD,KAAKP,MACEO,KAAKoD,cAG5B,GAAC,CAAAhE,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsB+B,GAAI,IAAA6B,EAAAC,EACxB9B,EAAG+B,kBAEH,MAAM9D,GAAiB,QAAT4D,EAAA7B,EAAGgC,cAAM,IAAAH,OAAA,EAATA,EAAW5D,QAAS+B,EAAGiC,OAAOhE,MACxCO,KAAK0D,eAAsBC,IAAVlE,GAAuBA,KAAqB,QAAhB6D,EAAMtD,KAAKP,aAAK,IAAA6D,EAAAA,EAAI,MAGrE5B,EAAAA,EAAAA,GAAU1B,KAAM,gBAAiB,CAC/BP,MAAOA,GAEX,GAAC,CAAAL,KAAA,QAAAuC,QAAA,EAAAnC,IAAA,SAAAC,KAAAA,GAAA,OAEemC,EAAAA,EAAAA,IAAGvB,IAAAA,EAAAN,CAAA,wLA5EuB+B,EAAAA,G"}
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1121.6a80ad1fbfcedf85.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1121.6a80ad1fbfcedf85.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1173.df00e6361fed8e6c.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1173.df00e6361fed8e6c.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
2
supervisor/api/panel/frontend_es5/12.ffa1bdc0a98802fa.js
Normal file
@@ -0,0 +1,2 @@
"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["12"],{5739:function(e,a,t){t.a(e,(async function(e,i){try{t.r(a),t.d(a,{HaNavigationSelector:()=>c});var d=t(73577),r=(t(71695),t(47021),t(57243)),n=t(50778),l=t(36522),o=t(63297),s=e([o]);o=(s.then?(await s)():s)[0];let u,h=e=>e,c=(0,d.Z)([(0,n.Mo)("ha-selector-navigation")],(function(e,a){return{F:class extends a{constructor(...a){super(...a),e(this)}},d:[{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,n.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"value",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,n.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,n.Cb)({type:Boolean,reflect:!0})],key:"disabled",value(){return!1}},{kind:"field",decorators:[(0,n.Cb)({type:Boolean})],key:"required",value(){return!0}},{kind:"method",key:"render",value:function(){return(0,r.dy)(u||(u=h` <ha-navigation-picker .hass="${0}" .label="${0}" .value="${0}" .required="${0}" .disabled="${0}" .helper="${0}" @value-changed="${0}"></ha-navigation-picker> `),this.hass,this.label,this.value,this.required,this.disabled,this.helper,this._valueChanged)}},{kind:"method",key:"_valueChanged",value:function(e){(0,l.B)(this,"value-changed",{value:e.detail.value})}}]}}),r.oi);i()}catch(u){i(u)}}))}}]);
//# sourceMappingURL=12.ffa1bdc0a98802fa.js.map
BIN
supervisor/api/panel/frontend_es5/12.ffa1bdc0a98802fa.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/12.ffa1bdc0a98802fa.js.gz
Normal file
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"12.ffa1bdc0a98802fa.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250205.0/src/components/ha-selector/ha-selector-navigation.ts"],"names":["HaNavigationSelector","_decorate","customElement","_initialize","_LitElement","F","constructor","args","d","kind","decorators","property","attribute","key","value","type","Boolean","reflect","html","_t","_","this","hass","label","required","disabled","helper","_valueChanged","ev","fireEvent","detail","LitElement"],"mappings":"mVAQaA,GAAoBC,EAAAA,EAAAA,GAAA,EADhCC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAiCvC,OAAAC,EAjCD,cACiCD,EAAoBE,WAAAA,IAAAC,GAAA,SAAAA,GAAAJ,EAAA,QAApBK,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,QAASC,SAAS,KAAOJ,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAElEC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,KAAAA,GAAA,OAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,EAAAC,CAAA,mKAECC,KAAKC,KACJD,KAAKE,MACLF,KAAKP,MACFO,KAAKG,SACLH,KAAKI,SACPJ,KAAKK,OACEL,KAAKM,cAG5B,GAAC,CAAAlB,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsBc,IACpBC,EAAAA,EAAAA,GAAUR,KAAM,gBAAiB,CAAEP,MAAOc,EAAGE,OAAOhB,OACtD,IAAC,GA/BuCiB,EAAAA,I"}
@@ -0,0 +1,2 @@
(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1236"],{4121:function(){Intl.PluralRules&&"function"==typeof Intl.PluralRules.__addLocaleData&&Intl.PluralRules.__addLocaleData({data:{categories:{cardinal:["one","other"],ordinal:["one","two","few","other"]},fn:function(e,n){var t=String(e).split("."),a=!t[1],l=Number(t[0])==e,o=l&&t[0].slice(-1),r=l&&t[0].slice(-2);return n?1==o&&11!=r?"one":2==o&&12!=r?"two":3==o&&13!=r?"few":"other":1==e&&a?"one":"other"}},locale:"en"})}}]);
//# sourceMappingURL=1236.64ca65d0ea4d76d4.js.map
BIN
supervisor/api/panel/frontend_es5/1236.64ca65d0ea4d76d4.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1236.64ca65d0ea4d76d4.js.gz
Normal file
Binary file not shown.
@@ -0,0 +1 @@
{"version":3,"file":"1236.64ca65d0ea4d76d4.js","sources":["/unknown/node_modules/@formatjs/intl-pluralrules/locale-data/en.js"],"names":["Intl","PluralRules","__addLocaleData","n","ord","s","String","split","v0","t0","Number","n10","slice","n100"],"mappings":"wHAEIA,KAAKC,aAA2D,mBAArCD,KAAKC,YAAYC,iBAC9CF,KAAKC,YAAYC,gBAAgB,CAAC,KAAO,CAAC,WAAa,CAAC,SAAW,CAAC,MAAM,SAAS,QAAU,CAAC,MAAM,MAAM,MAAM,UAAU,GAAK,SAASC,EAAGC,GAC3I,IAAIC,EAAIC,OAAOH,GAAGI,MAAM,KAAMC,GAAMH,EAAE,GAAII,EAAKC,OAAOL,EAAE,KAAOF,EAAGQ,EAAMF,GAAMJ,EAAE,GAAGO,OAAO,GAAIC,EAAOJ,GAAMJ,EAAE,GAAGO,OAAO,GACvH,OAAIR,EAAmB,GAAPO,GAAoB,IAARE,EAAa,MAC9B,GAAPF,GAAoB,IAARE,EAAa,MAClB,GAAPF,GAAoB,IAARE,EAAa,MACzB,QACQ,GAALV,GAAUK,EAAK,MAAQ,OAChC,GAAG,OAAS,M"}
File diff suppressed because one or more lines are too long
BIN
supervisor/api/panel/frontend_es5/1258.bb6811ca0567a5d6.js.br
Normal file
Binary file not shown.
BIN
supervisor/api/panel/frontend_es5/1258.bb6811ca0567a5d6.js.gz
Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Some files were not shown because too many files have changed in this diff.