Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-09-26 05:19:29 +00:00

Compare commits: 2024.12.1...autoupdate
541 Commits
[Commit list: 541 commits, from f29024ec56 (first row) to d0b7cc8ab3 (last row). Only the SHA1 column survived extraction; the author, avatar, and date cells are omitted.]
@@ -1,6 +1,6 @@
 {
   "name": "Supervisor dev",
-  "image": "ghcr.io/home-assistant/devcontainer:supervisor",
+  "image": "ghcr.io/home-assistant/devcontainer:2-supervisor",
   "containerEnv": {
     "WORKSPACE_DIRECTORY": "${containerWorkspaceFolder}"
   },
@@ -44,5 +44,8 @@
       }
     }
   },
-  "mounts": ["type=volume,target=/var/lib/docker"]
+  "mounts": [
+    "type=volume,target=/var/lib/docker",
+    "type=volume,target=/mnt/supervisor"
+  ]
 }
69 .github/ISSUE_TEMPLATE.md vendored
@@ -1,69 +0,0 @@
---
name: Report a bug with the Supervisor on a supported System
about: Report an issue related to the Home Assistant Supervisor.
labels: bug
---

<!-- READ THIS FIRST:
- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
- If you have a problem with an add-on, make an issue in it's repository.
-->

<!--
Important: You can only fill a bug repport for an supported system! If you run an unsupported installation. This report would be closed without comment.
-->

### Describe the issue

<!-- Provide as many details as possible. -->

### Steps to reproduce

<!-- What do you do to encounter the issue. -->

1. ...
2. ...
3. ...

### Enviroment details

<!-- You can find these details in the system tab of the supervisor panel, or by using the `ha` CLI. -->

- **Operating System:**: xxx
- **Supervisor version:**: xxx
- **Home Assistant version**: xxx

### Supervisor logs

<details>
<summary>Supervisor logs</summary>
<!--
- Frontend -> Supervisor -> System
- Or use this command: ha supervisor logs
- Logs are more than just errors, even if you don't think it's important, it is.
-->

```
Paste supervisor logs here

```

</details>

### System Information

<details>
<summary>System Information</summary>
<!--
- Use this command: ha info
-->

```
Paste system info here

```

</details>
20 .github/ISSUE_TEMPLATE/bug_report.yml vendored
@@ -1,6 +1,5 @@
-name: Bug Report Form
+name: Report an issue with Home Assistant Supervisor
 description: Report an issue related to the Home Assistant Supervisor.
-labels: bug
 body:
   - type: markdown
     attributes:
@@ -9,7 +8,7 @@ body:

         If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].

-        [fr]: https://community.home-assistant.io/c/feature-requests
+        [fr]: https://github.com/orgs/home-assistant/discussions
   - type: textarea
     validations:
       required: true
@@ -26,7 +25,7 @@ body:
     attributes:
       label: What type of installation are you running?
       description: >
-        If you don't know, can be found in [Settings -> System -> Repairs -> System Information](https://my.home-assistant.io/redirect/system_health/).
+        If you don't know, can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
         It is listed as the `Installation Type` value.
       options:
         - Home Assistant OS
@@ -72,20 +71,21 @@ body:
     validations:
       required: true
     attributes:
-      label: System Health information
+      label: System information
      description: >
-        System Health information can be found in the top right menu in [Settings -> System -> Repairs](https://my.home-assistant.io/redirect/repairs/).
+        The System information can be found in [Settings -> System -> Repairs -> (three dot menu) -> System Information](https://my.home-assistant.io/redirect/system_health/).
         Click the copy button at the bottom of the pop-up and paste it here.

         [](https://my.home-assistant.io/redirect/system_health/)
   - type: textarea
     attributes:
       label: Supervisor diagnostics
       placeholder: "drag-and-drop the diagnostics data file here (do not copy-and-paste the content)"
       description: >-
-        Supervisor diagnostics can be found in [Settings -> Integrations](https://my.home-assistant.io/redirect/integrations/).
-        Find the card that says `Home Assistant Supervisor`, open its menu and select 'Download diagnostics'.
+        Supervisor diagnostics can be found in [Settings -> Devices & services](https://my.home-assistant.io/redirect/integrations/).
+        Find the card that says `Home Assistant Supervisor`, open it, and select the three dot menu of the Supervisor integration entry
+        and select 'Download diagnostics'.

         **Please drag-and-drop the downloaded file into the textbox below. Do not copy and paste its contents.**
   - type: textarea
     attributes:
2 .github/ISSUE_TEMPLATE/config.yml vendored
@@ -13,7 +13,7 @@ contact_links:
     about: Our documentation has its own issue tracker. Please report issues with the website there.

   - name: Request a feature for the Supervisor
-    url: https://community.home-assistant.io/c/feature-requests
+    url: https://github.com/orgs/home-assistant/discussions
     about: Request an new feature for the Supervisor.

   - name: I have a question or need support
53 .github/ISSUE_TEMPLATE/task.yml vendored Normal file
@@ -0,0 +1,53 @@
name: Task
description: For staff only - Create a task
type: Task
body:
  - type: markdown
    attributes:
      value: |
        ## ⚠️ RESTRICTED ACCESS

        **This form is restricted to Open Home Foundation staff and authorized contributors only.**

        If you are a community member wanting to contribute, please:
        - For bug reports: Use the [bug report form](https://github.com/home-assistant/supervisor/issues/new?template=bug_report.yml)
        - For feature requests: Submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)

        ---

        ### For authorized contributors

        Use this form to create tasks for development work, improvements, or other actionable items that need to be tracked.
  - type: textarea
    id: description
    attributes:
      label: Description
      description: |
        Provide a clear and detailed description of the task that needs to be accomplished.

        Be specific about what needs to be done, why it's important, and any constraints or requirements.
      placeholder: |
        Describe the task, including:
        - What needs to be done
        - Why this task is needed
        - Expected outcome
        - Any constraints or requirements
    validations:
      required: true
  - type: textarea
    id: additional_context
    attributes:
      label: Additional context
      description: |
        Any additional information, links, research, or context that would be helpful.

        Include links to related issues, research, prototypes, roadmap opportunities etc.
      placeholder: |
        - Roadmap opportunity: [link]
        - Epic: [link]
        - Feature request: [link]
        - Technical design documents: [link]
        - Prototype/mockup: [link]
        - Dependencies: [links]
    validations:
      required: false
288 .github/copilot-instructions.md vendored Normal file
@@ -0,0 +1,288 @@
# GitHub Copilot & Claude Code Instructions

This repository contains the Home Assistant Supervisor, a Python 3 based container
orchestration and management system for Home Assistant.

## Supervisor Capabilities & Features

### Architecture Overview

Home Assistant Supervisor is a Python-based container orchestration system that
communicates with the Docker daemon to manage containerized components. It is tightly
integrated with the underlying Operating System and core Operating System components
through D-Bus.

**Managed Components:**
- **Home Assistant Core**: The main home automation application running in its own
  container (also provides the web interface)
- **Add-ons**: Third-party applications and services (each add-on runs in its own
  container)
- **Plugins**: Built-in system services like DNS, Audio, CLI, Multicast, and Observer
- **Host System Integration**: OS-level operations and hardware access via D-Bus
- **Container Networking**: Internal Docker network management and external
  connectivity
- **Storage & Backup**: Data persistence and backup management across all containers

**Key Dependencies:**
- **Docker Engine**: Required for all container operations
- **D-Bus**: System-level communication with the host OS
- **systemd**: Service management for host system operations
- **NetworkManager**: Network configuration and management

### Add-on System

**Add-on Architecture**: Add-ons are containerized applications available through
add-on stores. Each store contains multiple add-ons, and each add-on includes metadata
that tells Supervisor the version, startup configuration (permissions), and available
user-configurable options. Add-on metadata typically references a container image that
Supervisor fetches during installation. If not, the Supervisor builds the container
image from a Dockerfile.

**Built-in Stores**: Supervisor comes with several pre-configured stores:
- **Core Add-ons**: Official add-ons maintained by the Home Assistant team
- **Community Add-ons**: Popular third-party add-ons repository
- **ESPHome**: Add-ons for ESPHome ecosystem integration
- **Music Assistant**: Audio and music-related add-ons
- **Local Development**: Local folder for testing custom add-ons during development

**Store Management**: Stores are Git-based repositories that are periodically updated.
When updates are available, users receive notifications.

**Add-on Lifecycle**:
- **Installation**: Supervisor fetches or builds container images based on add-on
  metadata
- **Configuration**: Schema-validated options with integrated UI management
- **Runtime**: Full container lifecycle management, health monitoring
- **Updates**: Automatic or manual version management

### Update System

**Core Components**: Supervisor, Home Assistant Core, HAOS, and built-in plugins
receive version information from a central JSON file fetched from
`https://version.home-assistant.io/{channel}.json`. The `Updater` class handles
fetching this data, validating signatures, and updating internal version tracking.

**Update Channels**: Three channels (`stable`/`beta`/`dev`) determine which version
JSON file is fetched, allowing users to opt into different release streams.

**Add-on Updates**: Add-on version information comes from store repository updates, not
the central JSON file. When repositories are refreshed via the store system, add-ons
compare their local versions against repository versions to determine update
availability.
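To make the channel-file flow concrete, here is a minimal stand-alone sketch (illustrative only: it assumes `aiohttp` and a top-level `supervisor` key in the JSON, and the real `Updater` also verifies the file's signature before trusting it):

```python
import asyncio

import aiohttp

URL = "https://version.home-assistant.io/{channel}.json"


async def fetch_versions(channel: str = "stable") -> dict:
    """Fetch the central version file for one update channel."""
    async with aiohttp.ClientSession() as session:
        async with session.get(URL.format(channel=channel)) as resp:
            resp.raise_for_status()
            return await resp.json()


async def main() -> None:
    data = await fetch_versions("stable")
    # The key layout is an assumption based on the description above.
    print("Latest Supervisor:", data.get("supervisor"))


asyncio.run(main())
```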

### Backup & Recovery System

**Backup Capabilities**:
- **Full Backups**: Complete system state capture including all add-ons,
  configuration, and data
- **Partial Backups**: Selective backup of specific components (Home Assistant,
  add-ons, folders)
- **Encrypted Backups**: Optional backup encryption with user-provided passwords
- **Multiple Storage Locations**: Local storage and remote backup destinations

**Recovery Features**:
- **One-click Restore**: Simple restoration from backup files
- **Selective Restore**: Choose specific components to restore
- **Automatic Recovery**: Self-healing for common system issues

---

## Supervisor Development

### Python Requirements

- **Compatibility**: Python 3.13+
- **Language Features**: Use modern Python features (see the sketch after this list):
  - Type hints with `typing` module
  - f-strings (preferred over `%` or `.format()`)
  - Dataclasses and enum classes
  - Async/await patterns
  - Pattern matching where appropriate
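For orientation, a compact sketch that exercises several of these features together (all names here are invented for the example):

```python
from dataclasses import dataclass
from enum import StrEnum


class AddonState(StrEnum):
    """Illustrative state enum, not Supervisor's real one."""

    STARTED = "started"
    STOPPED = "stopped"


@dataclass
class AddonInfo:
    """Typed dataclass carrying example add-on metadata."""

    name: str
    version: str
    state: AddonState


def describe(info: AddonInfo) -> str:
    """f-strings plus structural pattern matching."""
    match info.state:
        case AddonState.STARTED:
            return f"{info.name} {info.version} is running"
        case AddonState.STOPPED:
            return f"{info.name} is stopped"
        case _:
            return f"{info.name} is in an unknown state"


print(describe(AddonInfo("example", "1.0.0", AddonState.STARTED)))
```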

### Code Quality Standards

- **Formatting**: Ruff
- **Linting**: PyLint and Ruff
- **Type Checking**: MyPy
- **Testing**: pytest with asyncio support
- **Language**: American English for all code, comments, and documentation

### Code Organization

**Core Structure**:
```
supervisor/
├── __init__.py        # Package initialization
├── const.py           # Constants and enums
├── coresys.py         # Core system management
├── bootstrap.py       # System initialization
├── exceptions.py      # Custom exception classes
├── api/               # REST API endpoints
├── addons/            # Add-on management
├── backups/           # Backup system
├── docker/            # Docker integration
├── host/              # Host system interface
├── homeassistant/     # Home Assistant Core management
├── dbus/              # D-Bus system integration
├── hardware/          # Hardware detection and management
├── plugins/           # Plugin system
├── resolution/        # Issue detection and resolution
├── security/          # Security management
├── services/          # Service discovery and management
├── store/             # Add-on store management
└── utils/             # Utility functions
```

**Shared Constants**: Use constants from `supervisor/const.py` instead of hardcoding
values. Define new constants following existing patterns and group related constants
together.

### Supervisor Architecture Patterns

**CoreSysAttributes Inheritance Pattern**: Nearly all major classes in Supervisor
inherit from `CoreSysAttributes`, providing access to the centralized system state
via `self.coresys` and convenient `sys_*` properties.

```python
# Standard Supervisor class pattern
class MyManager(CoreSysAttributes):
    """Manage my functionality."""

    def __init__(self, coresys: CoreSys):
        """Initialize manager."""
        self.coresys: CoreSys = coresys
        self._component: MyComponent = MyComponent(coresys)

    @property
    def component(self) -> MyComponent:
        """Return component handler."""
        return self._component

    # Access system components via inherited properties
    async def do_something(self):
        await self.sys_docker.containers.get("my_container")
        self.sys_bus.fire_event(BusEvent.MY_EVENT, {"data": "value"})
```

**Key Inherited Properties from CoreSysAttributes**:
- `self.sys_docker` - Docker API access
- `self.sys_run_in_executor()` - Execute blocking operations
- `self.sys_create_task()` - Create async tasks
- `self.sys_bus` - Event bus for system events
- `self.sys_config` - System configuration
- `self.sys_homeassistant` - Home Assistant Core management
- `self.sys_addons` - Add-on management
- `self.sys_host` - Host system access
- `self.sys_dbus` - D-Bus system interface

**Load Pattern**: Many components implement a `load()` method which effectively
initializes the component from external sources (containers, files, D-Bus services),
as in the sketch below.
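A hedged sketch of that pattern; `CoreSysAttributes`, `CoreSys`, and `DockerError` are real names from the codebase, while the component, container name, and label key are invented for illustration:

```python
class MyPlugin(CoreSysAttributes):
    """Example component following the load pattern."""

    def __init__(self, coresys: CoreSys):
        """Sync construction only; no I/O here."""
        self.coresys: CoreSys = coresys
        self._version: str | None = None

    async def load(self) -> None:
        """Async initialization from an external source (a container)."""
        try:
            container = await self.sys_docker.containers.get("my_plugin")
        except DockerError:
            self._version = None  # nothing running yet
        else:
            self._version = container.labels.get("io.hass.version")
```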

### API Development

**REST API Structure**:
- **Base Path**: `/api/` for all endpoints
- **Authentication**: Bearer token authentication
- **Consistent Response Format**: `{"result": "ok", "data": {...}}` or
  `{"result": "error", "message": "..."}`
- **Validation**: Use voluptuous schemas with `api_validate()`

**Use `@api_process` Decorator**: This decorator handles all standard error handling
and response formatting automatically. The decorator catches `APIError`, `HassioError`,
and other exceptions, returning appropriate HTTP responses.

```python
from ..api.utils import api_process, api_validate

@api_process
async def backup_full(self, request: web.Request) -> dict[str, Any]:
    """Create full backup."""
    body = await api_validate(SCHEMA_BACKUP_FULL, request)
    job = await self.sys_backups.do_backup_full(**body)
    return {ATTR_JOB_ID: job.uuid}
```

### Docker Integration

- **Container Management**: Use Supervisor's Docker manager instead of the direct
  Docker API
- **Networking**: Supervisor manages internal Docker networks with predefined IP
  ranges
- **Security**: AppArmor profiles, capability restrictions, and user namespace
  isolation
- **Health Checks**: Implement health monitoring for all managed containers

### D-Bus Integration

- **Use dbus-fast**: Async D-Bus library for system integration
- **Service Management**: systemd, NetworkManager, hostname management
- **Error Handling**: Wrap D-Bus exceptions in Supervisor-specific exceptions (see
  the sketch after this list)
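A minimal sketch of that guidance using dbus-fast's asyncio `MessageBus`; the hostname1 call is illustrative, and `HostError` stands in for a Supervisor-specific exception class:

```python
from dbus_fast import BusType
from dbus_fast.aio import MessageBus
from dbus_fast.errors import DBusError


class HostError(Exception):
    """Stand-in for a domain exception from exceptions.py."""


async def read_hostname() -> str:
    """Read StaticHostname from systemd-hostnamed, wrapping failures."""
    bus = await MessageBus(bus_type=BusType.SYSTEM).connect()
    try:
        introspection = await bus.introspect(
            "org.freedesktop.hostname1", "/org/freedesktop/hostname1"
        )
        proxy = bus.get_proxy_object(
            "org.freedesktop.hostname1", "/org/freedesktop/hostname1", introspection
        )
        iface = proxy.get_interface("org.freedesktop.hostname1")
        return await iface.get_static_hostname()
    except DBusError as err:
        # Chain the low-level D-Bus failure into a domain exception
        raise HostError("Can't read hostname") from err
    finally:
        bus.disconnect()
```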

### Async Programming

- **All I/O operations must be async**: File operations, network calls, subprocess
  execution
- **Use asyncio patterns**: Prefer `asyncio.gather()` over sequential awaits
- **Executor jobs**: Use `self.sys_run_in_executor()` for blocking operations (see
  the sketch after this list)
- **Two-phase initialization**: `__init__` for sync setup, `post_init()` for async
  initialization
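A stand-alone illustration of the executor and `asyncio.gather()` points, using plain asyncio; inside Supervisor the same work would go through `self.sys_run_in_executor()`:

```python
import asyncio
import hashlib
from pathlib import Path


def _hash_file(path: Path) -> str:
    """Blocking file I/O plus CPU work; must stay off the event loop."""
    return hashlib.sha256(path.read_bytes()).hexdigest()


async def hash_files(paths: list[Path]) -> list[str]:
    """Run every blocking job in the executor, all concurrently."""
    loop = asyncio.get_running_loop()
    jobs = [loop.run_in_executor(None, _hash_file, p) for p in paths]
    return await asyncio.gather(*jobs)


print(asyncio.run(hash_files([Path("/etc/hostname")])))
```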

### Testing

- **Location**: `tests/` directory with module mirroring
- **Fixtures**: Extensive use of pytest fixtures for CoreSys setup (see the sketch
  after this list)
- **Mocking**: Mock external dependencies (Docker, D-Bus, network calls)
- **Coverage**: Minimum 90% test coverage, 100% for security-sensitive code
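A hedged sketch of that fixture style; the `coresys` fixture is assumed to come from the shared test conftest, and the patched attribute path is illustrative:

```python
from unittest.mock import AsyncMock

import pytest


@pytest.fixture
def missing_container(coresys):
    """Make Docker report that no container exists yet."""
    coresys.docker.containers.get = AsyncMock(return_value=None)
    return coresys


async def test_handles_missing_container(missing_container):
    """Components should tolerate a container that is not created yet."""
    assert await missing_container.docker.containers.get("my_container") is None
```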

### Error Handling

- **Custom Exceptions**: Defined in `exceptions.py` with a clear inheritance hierarchy
- **Error Propagation**: Use the `from` clause for exception chaining (see the sketch
  after this list)
- **API Errors**: Use `APIError` with appropriate HTTP status codes
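The `from` rule in a few lines, with exception names taken from `exceptions.py` and the surrounding code invented for illustration:

```python
try:
    await self.instance.run()
except DockerError as err:
    # Preserve the low-level cause while raising the domain error
    raise AddonsError(f"Can't start add-on {self.slug}") from err
```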

### Security Considerations

- **Container Security**: AppArmor profiles mandatory for add-ons, minimal
  capabilities
- **Authentication**: Token-based API authentication with role-based access
- **Data Protection**: Backup encryption, secure secret management, comprehensive
  input validation

### Development Commands

```bash
# Run tests, adjust paths as necessary
pytest -qsx tests/

# Linting and formatting
ruff check supervisor/
ruff format supervisor/

# Type checking
mypy --ignore-missing-imports supervisor/

# Pre-commit hooks
pre-commit run --all-files
```

Always run the pre-commit hooks at the end of code editing.

### Common Patterns to Follow

**✅ Use These Patterns**:
- Inherit from `CoreSysAttributes` for system access
- Use the `@api_process` decorator for API endpoints
- Use `self.sys_run_in_executor()` for blocking operations
- Access Docker via `self.sys_docker`, not the direct Docker API
- Use constants from `const.py` instead of hardcoding
- Store types in (per-module) `const.py` (e.g. `supervisor/store/const.py`)

**❌ Avoid These Patterns**:
- Direct Docker API usage - use Supervisor's Docker manager
- Blocking operations in an async context (use asyncio alternatives)
- Hardcoded values - use constants from `const.py`
- Manual error handling in API endpoints - let `@api_process` handle it

This guide provides the foundation for contributing to Home Assistant Supervisor.
Follow these patterns and guidelines to ensure code quality, security, and
maintainability.
31 .github/workflows/builder.yml vendored
@@ -33,7 +33,7 @@ on:
       - setup.py

 env:
-  DEFAULT_PYTHON: "3.12"
+  DEFAULT_PYTHON: "3.13"
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor

@@ -53,7 +53,7 @@ jobs:
       requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           fetch-depth: 0

@@ -70,7 +70,7 @@ jobs:
       - name: Get changed files
         id: changed_files
         if: steps.version.outputs.publish == 'false'
-        uses: masesgroup/retrieve-changed-files@v3.0.0
+        uses: masesgroup/retrieve-changed-files@491e80760c0e28d36ca6240a27b1ccb8e1402c13 # v3.0.0

       - name: Check if requirements files changed
         id: requirements
@@ -92,7 +92,7 @@ jobs:
       arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           fetch-depth: 0

@@ -104,11 +104,12 @@ jobs:
             echo "CARGO_NET_GIT_FETCH_WITH_CLI=true"
           ) > .env_file

+      # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
         if: needs.init.outputs.requirements == 'true'
-        uses: home-assistant/wheels@2024.11.0
+        uses: home-assistant/wheels@2025.09.0
         with:
-          abi: cp312
+          abi: cp313
           tag: musllinux_1_2
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
@@ -125,15 +126,15 @@ jobs:

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.publish == 'true'
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Install Cosign
         if: needs.init.outputs.publish == 'true'
-        uses: sigstore/cosign-installer@v3.7.0
+        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
         with:
-          cosign-release: "v2.4.0"
+          cosign-release: "v2.5.3"

       - name: Install dirhash and calc hash
         if: needs.init.outputs.publish == 'true'
@@ -149,7 +150,7 @@ jobs:

       - name: Login to GitHub Container Registry
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v3.3.0
+        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -159,8 +160,9 @@ jobs:
         if: needs.init.outputs.publish == 'false'
         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV

+      # home-assistant/builder doesn't support sha pinning
       - name: Build supervisor
-        uses: home-assistant/builder@2024.08.2
+        uses: home-assistant/builder@2025.09.0
         with:
           args: |
             $BUILD_ARGS \
@@ -178,7 +180,7 @@ jobs:
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Initialize git
         if: needs.init.outputs.publish == 'true'
@@ -203,11 +205,12 @@ jobs:
     timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

+      # home-assistant/builder doesn't support sha pinning
       - name: Build the Supervisor
         if: needs.init.outputs.publish != 'true'
-        uses: home-assistant/builder@2024.08.2
+        uses: home-assistant/builder@2025.09.0
         with:
           args: |
             --test \
124 .github/workflows/ci.yaml vendored
@@ -8,8 +8,9 @@ on:
   pull_request: ~

 env:
-  DEFAULT_PYTHON: "3.12"
+  DEFAULT_PYTHON: "3.13"
   PRE_COMMIT_CACHE: ~/.cache/pre-commit
+  MYPY_CACHE_VERSION: 1

 concurrency:
   group: "${{ github.workflow }}-${{ github.ref }}"
@@ -25,15 +26,15 @@ jobs:
     name: Prepare Python dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python
         id: python
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: |
@@ -47,7 +48,7 @@ jobs:
           pip install -r requirements.txt -r requirements_tests.txt
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -67,15 +68,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: |
@@ -87,7 +88,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -110,15 +111,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: |
@@ -130,7 +131,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -153,7 +154,7 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -168,15 +169,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: |
@@ -188,7 +189,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -212,15 +213,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: |
@@ -232,7 +233,7 @@ jobs:
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           key: |
@@ -256,15 +257,15 @@ jobs:
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: |
@@ -286,25 +287,71 @@ jobs:
           . venv/bin/activate
           pylint supervisor tests

+  mypy:
+    name: Check mypy
+    runs-on: ubuntu-latest
+    needs: prepare
+    steps:
+      - name: Check out code from GitHub
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - name: Set up Python ${{ needs.prepare.outputs.python-version }}
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
+        id: python
+        with:
+          python-version: ${{ needs.prepare.outputs.python-version }}
+      - name: Generate partial mypy restore key
+        id: generate-mypy-key
+        run: |
+          mypy_version=$(cat requirements_test.txt | grep mypy | cut -d '=' -f 3)
+          echo "version=$mypy_version" >> $GITHUB_OUTPUT
+          echo "key=mypy-${{ env.MYPY_CACHE_VERSION }}-$mypy_version-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
+      - name: Restore Python virtual environment
+        id: cache-venv
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        with:
+          path: venv
+          key: >-
+            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
+      - name: Fail job if Python cache restore failed
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        run: |
+          echo "Failed to restore Python virtual environment from cache"
+          exit 1
+      - name: Restore mypy cache
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
+        with:
+          path: .mypy_cache
+          key: >-
+            ${{ runner.os }}-mypy-${{ needs.prepare.outputs.python-version }}-${{ steps.generate-mypy-key.outputs.key }}
+          restore-keys: >-
+            ${{ runner.os }}-venv-${{ needs.prepare.outputs.python-version }}-mypy-${{ env.MYPY_CACHE_VERSION }}-${{ steps.generate-mypy-key.outputs.version }}
+      - name: Register mypy problem matcher
+        run: |
+          echo "::add-matcher::.github/workflows/matchers/mypy.json"
+      - name: Run mypy
+        run: |
+          . venv/bin/activate
+          mypy --ignore-missing-imports supervisor
+
   pytest:
     runs-on: ubuntu-latest
     needs: prepare
     name: Run tests Python ${{ needs.prepare.outputs.python-version }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.7.0
+        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
         with:
-          cosign-release: "v2.4.0"
+          cosign-release: "v2.5.3"
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: |
@@ -339,9 +386,9 @@ jobs:
             -o console_output_style=count \
             tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@v4.5.0
+        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
-          name: coverage-${{ matrix.python-version }}
+          name: coverage
           path: .coverage
           include-hidden-files: true

@@ -351,15 +398,15 @@ jobs:
     needs: ["pytest", "prepare"]
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Set up Python ${{ needs.prepare.outputs.python-version }}
-        uses: actions/setup-python@v5.3.0
+        uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
         id: python
         with:
           python-version: ${{ needs.prepare.outputs.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.0
+        uses: actions/cache@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
         with:
           path: venv
           key: |
@@ -370,7 +417,10 @@ jobs:
           echo "Failed to restore Python virtual environment from cache"
           exit 1
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        with:
+          name: coverage
+          path: coverage/
       - name: Combine coverage results
         run: |
           . venv/bin/activate
@@ -378,4 +428,4 @@ jobs:
           coverage report
           coverage xml
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v5.1.2
+        uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1
2 .github/workflows/lock.yml vendored
@@ -9,7 +9,7 @@ jobs:
   lock:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v5.0.1
+      - uses: dessant/lock-threads@1bf7ec25051fe7c00bdd17e6a7cf3d7bfb7dc771 # v5.0.1
        with:
          github-token: ${{ github.token }}
          issue-inactive-days: "30"
16 .github/workflows/matchers/mypy.json vendored Normal file
@@ -0,0 +1,16 @@
{
  "problemMatcher": [
    {
      "owner": "mypy",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):\\s(error|warning):\\s(.+)$",
          "file": 1,
          "line": 2,
          "severity": 3,
          "message": 4
        }
      ]
    }
  ]
}
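The matcher's `regexp` targets standard mypy output of the form `path.py:LINE: error: message`; a quick, stand-alone check of that pattern (the sample line is invented):

```python
import re

# Same expression as in the matcher above (unescaped for Python)
PATTERN = re.compile(r"^(.+):(\d+):\s(error|warning):\s(.+)$")

sample = "supervisor/addons/manager.py:42: error: Incompatible return value"
match = PATTERN.match(sample)
assert match is not None
print(match.groups())
# ('supervisor/addons/manager.py', '42', 'error', 'Incompatible return value')
```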
4 .github/workflows/release-drafter.yml vendored
@@ -11,7 +11,7 @@ jobs:
     name: Release Drafter
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
         with:
           fetch-depth: 0

@@ -36,7 +36,7 @@ jobs:
           echo "version=$datepre.$newpost" >> "$GITHUB_OUTPUT"

       - name: Run Release Drafter
-        uses: release-drafter/release-drafter@v6.0.0
+        uses: release-drafter/release-drafter@b1476f6e6eb133afa41ed8589daba6dc69b4d3f5 # v6.1.0
         with:
           tag: ${{ steps.version.outputs.version }}
           name: ${{ steps.version.outputs.version }}
58 .github/workflows/restrict-task-creation.yml vendored Normal file
@@ -0,0 +1,58 @@
name: Restrict task creation

# yamllint disable-line rule:truthy
on:
  issues:
    types: [opened]

jobs:
  check-authorization:
    runs-on: ubuntu-latest
    # Only run if this is a Task issue type (from the issue form)
    if: github.event.issue.type.name == 'Task'
    steps:
      - name: Check if user is authorized
        uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
        with:
          script: |
            const issueAuthor = context.payload.issue.user.login;

            // Check if user is an organization member
            try {
              await github.rest.orgs.checkMembershipForUser({
                org: 'home-assistant',
                username: issueAuthor
              });
              console.log(`✅ ${issueAuthor} is an organization member`);
              return; // Authorized
            } catch (error) {
              console.log(`❌ ${issueAuthor} is not authorized to create Task issues`);
            }

            // Close the issue with a comment
            await github.rest.issues.createComment({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              body: `Hi @${issueAuthor}, thank you for your contribution!\n\n` +
                    `Task issues are restricted to Open Home Foundation staff and authorized contributors.\n\n` +
                    `If you would like to:\n` +
                    `- Report a bug: Please use the [bug report form](https://github.com/home-assistant/supervisor/issues/new?template=bug_report.yml)\n` +
                    `- Request a feature: Please submit to [Feature Requests](https://github.com/orgs/home-assistant/discussions)\n\n` +
                    `If you believe you should have access to create Task issues, please contact the maintainers.`
            });

            await github.rest.issues.update({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              state: 'closed'
            });

            // Add a label to indicate this was auto-closed
            await github.rest.issues.addLabels({
              owner: context.repo.owner,
              repo: context.repo.repo,
              issue_number: context.issue.number,
              labels: ['auto-closed']
            });
4 .github/workflows/sentry.yaml vendored
@@ -10,9 +10,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v4.2.2
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
       - name: Sentry Release
-        uses: getsentry/action-release@v1.7.0
+        uses: getsentry/action-release@4f502acc1df792390abe36f2dcb03612ef144818 # v3.3.0
         env:
           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
           SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
2 .github/workflows/stale.yml vendored
@@ -9,7 +9,7 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v9.0.0
+      - uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 30
82 .github/workflows/update_frontend.yml vendored Normal file
@@ -0,0 +1,82 @@
+name: Update frontend
+
+on:
+  schedule: # once a day
+    - cron: "0 0 * * *"
+  workflow_dispatch:
+
+jobs:
+  check-version:
+    runs-on: ubuntu-latest
+    outputs:
+      skip: ${{ steps.check_version.outputs.skip || steps.check_existing_pr.outputs.skip }}
+      current_version: ${{ steps.check_version.outputs.current_version }}
+      latest_version: ${{ steps.latest_frontend_version.outputs.latest_tag }}
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - name: Get latest frontend release
+        id: latest_frontend_version
+        uses: abatilo/release-info-action@32cb932219f1cee3fc4f4a298fd65ead5d35b661 # v1.3.3
+        with:
+          owner: home-assistant
+          repo: frontend
+      - name: Check if version is up to date
+        id: check_version
+        run: |
+          current_version="$(cat .ha-frontend-version)"
+          latest_version="${{ steps.latest_frontend_version.outputs.latest_tag }}"
+          echo "current_version=${current_version}" >> $GITHUB_OUTPUT
+          echo "LATEST_VERSION=${latest_version}" >> $GITHUB_ENV
+          if [[ ! "$current_version" < "$latest_version" ]]; then
+            echo "Frontend version is up to date"
+            echo "skip=true" >> $GITHUB_OUTPUT
+          fi
+      - name: Check if there is no open PR with this version
+        if: steps.check_version.outputs.skip != 'true'
+        id: check_existing_pr
+        env:
+          GH_TOKEN: ${{ github.token }}
+        run: |
+          PR=$(gh pr list --state open --base main --json title --search "Update frontend to version $LATEST_VERSION")
+          if [[ "$PR" != "[]" ]]; then
+            echo "Skipping - There is already a PR open for version $LATEST_VERSION"
+            echo "skip=true" >> $GITHUB_OUTPUT
+          fi
+  create-pr:
+    runs-on: ubuntu-latest
+    needs: check-version
+    if: needs.check-version.outputs.skip != 'true'
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - name: Clear www folder
+        run: |
+          rm -rf supervisor/api/panel/*
+      - name: Update version file
+        run: |
+          echo "${{ needs.check-version.outputs.latest_version }}" > .ha-frontend-version
+      - name: Download release assets
+        uses: robinraju/release-downloader@daf26c55d821e836577a15f77d86ddc078948b05 # v1.12
+        with:
+          repository: 'home-assistant/frontend'
+          tag: ${{ needs.check-version.outputs.latest_version }}
+          fileName: home_assistant_frontend_supervisor-${{ needs.check-version.outputs.latest_version }}.tar.gz
+          extract: true
+          out-file-path: supervisor/api/panel/
+      - name: Remove release assets archive
+        run: |
+          rm -f supervisor/api/panel/home_assistant_frontend_supervisor-*.tar.gz
+      - name: Create PR
+        uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
+        with:
+          commit-message: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
+          branch: autoupdate-frontend
+          base: main
+          draft: true
+          sign-commits: true
+          title: "Update frontend to version ${{ needs.check-version.outputs.latest_version }}"
+          body: >
+            Update frontend from ${{ needs.check-version.outputs.current_version }} to
+            [${{ needs.check-version.outputs.latest_version }}](https://github.com/home-assistant/frontend/releases/tag/${{ needs.check-version.outputs.latest_version }})
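Note on the version check in this workflow: inside `[[ ... ]]`, bash's `<` is a lexicographic string comparison, not a numeric one. For the date-based frontend tags this repo uses (YYYYMMDD.N) that ordering coincides with chronological order. A quick hypothetical illustration (not part of the workflow) in Python, whose string comparison follows the same rules:

    # bash's [[ "$a" < "$b" ]] compares strings, same as Python's str ordering.
    assert "20250925.1" < "20251002.1"        # later date sorts higher
    assert not "20250925.1" < "20250925.1"    # equal versions: the step sets skip=true
    # Caveat (identical in bash): the comparison is not numeric, so a
    # double-digit patch sorts "before" a single-digit one:
    assert "20250925.10" < "20250925.9"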
3 .gitignore vendored
@@ -100,3 +100,6 @@ ENV/
 # mypy
 /.mypy_cache/*
 /.dmypy.json
+
+# Mac
+.DS_Store
4 .gitmodules vendored
@@ -1,4 +0,0 @@
-[submodule "home-assistant-polymer"]
-    path = home-assistant-polymer
-    url = https://github.com/home-assistant/home-assistant-polymer
-    branch = dev
1 .ha-frontend-version Normal file
@@ -0,0 +1 @@
+20250925.1
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.5.7
+    rev: v0.11.10
     hooks:
       - id: ruff
        args:
@@ -8,8 +8,20 @@ repos:
       - id: ruff-format
         files: ^((supervisor|tests)/.+)?[^/]+\.py$
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v5.0.0
     hooks:
       - id: check-executables-have-shebangs
         stages: [manual]
       - id: check-json
+  - repo: local
+    hooks:
+      # Run mypy through our wrapper script in order to get the possible
+      # pyenv and/or virtualenv activated; it may not have been e.g. if
+      # committing from a GUI tool that was not launched from an activated
+      # shell.
+      - id: mypy
+        name: mypy
+        entry: script/run-in-env.sh mypy --ignore-missing-imports
+        language: script
+        types_or: [python, pyi]
+        files: ^supervisor/.+\.(py|pyi)$
12 Dockerfile
@@ -9,7 +9,8 @@ ENV \
 
 ARG \
     COSIGN_VERSION \
-    BUILD_ARCH
+    BUILD_ARCH \
+    QEMU_CPU
 
 # Install base
 WORKDIR /usr/src
@@ -28,22 +29,23 @@ RUN \
     \
     && curl -Lso /usr/bin/cosign "https://github.com/home-assistant/cosign/releases/download/${COSIGN_VERSION}/cosign_${BUILD_ARCH}" \
     && chmod a+x /usr/bin/cosign \
-    && pip3 install uv==0.2.21
+    && pip3 install uv==0.8.9
 
 # Install requirements
 COPY requirements.txt .
 RUN \
     if [ "${BUILD_ARCH}" = "i386" ]; then \
-        linux32 uv pip install --no-build -r requirements.txt; \
+        setarch="linux32"; \
     else \
-        uv pip install --no-build -r requirements.txt; \
+        setarch=""; \
     fi \
+    && ${setarch} uv pip install --compile-bytecode --no-cache --no-build -r requirements.txt \
     && rm -f requirements.txt
 
 # Install Home Assistant Supervisor
 COPY . supervisor
 RUN \
-    pip3 install -e ./supervisor \
+    uv pip install --no-cache -e ./supervisor \
     && python3 -m compileall ./supervisor/supervisor
12 build.yaml
@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-hassio-supervisor
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.12-alpine3.20
-  armhf: ghcr.io/home-assistant/armhf-base-python:3.12-alpine3.20
-  armv7: ghcr.io/home-assistant/armv7-base-python:3.12-alpine3.20
-  amd64: ghcr.io/home-assistant/amd64-base-python:3.12-alpine3.20
-  i386: ghcr.io/home-assistant/i386-base-python:3.12-alpine3.20
+  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.13-alpine3.22
+  armhf: ghcr.io/home-assistant/armhf-base-python:3.13-alpine3.22
+  armv7: ghcr.io/home-assistant/armv7-base-python:3.13-alpine3.22
+  amd64: ghcr.io/home-assistant/amd64-base-python:3.13-alpine3.22
+  i386: ghcr.io/home-assistant/i386-base-python:3.13-alpine3.22
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
@@ -12,7 +12,7 @@ cosign:
   base_identity: https://github.com/home-assistant/docker-base/.*
   identity: https://github.com/home-assistant/supervisor/.*
 args:
-  COSIGN_VERSION: 2.4.0
+  COSIGN_VERSION: 2.5.3
 labels:
   io.hass.type: supervisor
   org.opencontainers.image.title: Home Assistant Supervisor
Submodule home-assistant-polymer deleted from 46f0e0212d
@@ -1,5 +1,5 @@
 [build-system]
-requires = ["setuptools~=75.6.0", "wheel~=0.45.0"]
+requires = ["setuptools~=80.9.0", "wheel~=0.46.1"]
 build-backend = "setuptools.build_meta"
 
 [project]
@@ -12,7 +12,7 @@ authors = [
     { name = "The Home Assistant Authors", email = "hello@home-assistant.io" },
 ]
 keywords = ["docker", "home-assistant", "api"]
-requires-python = ">=3.12.0"
+requires-python = ">=3.13.0"
 
 [project.urls]
 "Homepage" = "https://www.home-assistant.io/"
@@ -31,7 +31,7 @@ include-package-data = true
 include = ["supervisor*"]
 
 [tool.pylint.MAIN]
-py-version = "3.12"
+py-version = "3.13"
 # Use a conservative default here; 2 should speed up most setups and not hurt
 # any too bad. Override on command line as appropriate.
 jobs = 2
@@ -147,7 +147,7 @@ disable = [
     # "pointless-statement", # B018, ruff catches new occurrences, needs more work
     "raise-missing-from", # TRY200
     # "redefined-builtin", # A001, ruff is way more stricter, needs work
-    "try-except-raise", # TRY302
+    "try-except-raise", # TRY203
     "unused-argument", # ARG001, we don't use it
     "unused-format-string-argument", #F507
     "unused-format-string-key", # F504
@@ -223,12 +223,16 @@ testpaths = ["tests"]
 norecursedirs = [".git"]
 log_format = "%(asctime)s.%(msecs)03d %(levelname)-8s %(threadName)s %(name)s:%(filename)s:%(lineno)s %(message)s"
 log_date_format = "%Y-%m-%d %H:%M:%S"
+asyncio_default_fixture_loop_scope = "function"
 asyncio_mode = "auto"
 filterwarnings = [
     "error",
     "ignore:pkg_resources is deprecated as an API:DeprecationWarning:dirhash",
     "ignore::pytest.PytestUnraisableExceptionWarning",
 ]
+markers = [
+    "no_mock_init_websession: disable the autouse mock of init_websession for this test",
+]
 
 [tool.ruff]
 lint.select = [
@@ -271,7 +275,6 @@ lint.select = [
     "S317", # suspicious-xml-sax-usage
     "S318", # suspicious-xml-mini-dom-usage
     "S319", # suspicious-xml-pull-dom-usage
-    "S320", # suspicious-xmle-tree-usage
     "S601", # paramiko-call
     "S602", # subprocess-popen-with-shell-equals-true
     "S604", # call-with-shell-equals-true
@@ -289,7 +292,7 @@ lint.select = [
     "T20", # flake8-print
     "TID251", # Banned imports
     "TRY004", # Prefer TypeError exception for invalid type
-    "TRY302", # Remove exception handler; error is immediately re-raised
+    "TRY203", # Remove exception handler; error is immediately re-raised
     "UP", # pyupgrade
     "W", # pycodestyle
 ]
@@ -1,29 +1,30 @@
-aiodns==3.2.0
-aiohttp==3.11.11
+aiodns==3.5.0
+aiohttp==3.12.15
 atomicwrites-homeassistant==1.4.1
-attrs==24.3.0
-awesomeversion==24.6.0
+attrs==25.3.0
+awesomeversion==25.8.0
+blockbuster==1.5.25
 brotli==1.1.0
-ciso8601==2.3.2
+ciso8601==2.3.3
 colorlog==6.9.0
 cpe==1.3.1
-cryptography==44.0.0
-debugpy==1.8.11
+cryptography==46.0.1
+debugpy==1.8.17
 deepmerge==2.0
 dirhash==0.5.0
 docker==7.1.0
 faust-cchardet==2.1.19
-gitpython==3.1.43
-jinja2==3.1.5
-orjson==3.10.12
-pulsectl==24.11.0
+gitpython==3.1.45
+jinja2==3.1.6
+log-rate-limit==1.4.2
+orjson==3.11.3
+pulsectl==24.12.0
 pyudev==0.24.3
 PyYAML==6.0.2
-requests==2.32.3
-securetar==2024.11.0
-sentry-sdk==2.19.2
-setuptools==75.6.0
+requests==2.32.5
+securetar==2025.2.1
+sentry-sdk==2.38.0
+setuptools==80.9.0
 voluptuous==0.15.2
-dbus-fast==2.24.4
-typing_extensions==4.12.2
-zlib-fast==0.2.0
+dbus-fast==2.44.3
+zlib-fast==0.2.1
@@ -1,13 +1,16 @@
-astroid==3.3.7
-coverage==7.6.9
-pre-commit==4.0.1
-pylint==3.3.2
-pytest-aiohttp==1.0.5
-pytest-asyncio==0.23.6
-pytest-cov==6.0.0
-pytest-timeout==2.3.1
-pytest==8.3.4
-ruff==0.8.4
-time-machine==2.16.0
-typing_extensions==4.12.2
-urllib3==2.3.0
+astroid==3.3.11
+coverage==7.10.7
+mypy==1.18.2
+pre-commit==4.3.0
+pylint==3.3.8
+pytest-aiohttp==1.1.0
+pytest-asyncio==0.25.2
+pytest-cov==7.0.0
+pytest-timeout==2.4.0
+pytest==8.4.2
+ruff==0.13.1
+time-machine==2.19.0
+types-docker==7.1.0.20250916
+types-pyyaml==6.0.12.20250915
+types-requests==2.32.4.20250913
+urllib3==2.5.0
30 script/run-in-env.sh Executable file
@@ -0,0 +1,30 @@
+#!/usr/bin/env sh
+set -eu
+
+# Used in venv activate script.
+# Would be an error if undefined.
+OSTYPE="${OSTYPE-}"
+
+# Activate pyenv and virtualenv if present, then run the specified command
+
+# pyenv, pyenv-virtualenv
+if [ -s .python-version ]; then
+    PYENV_VERSION=$(head -n 1 .python-version)
+    export PYENV_VERSION
+fi
+
+if [ -n "${VIRTUAL_ENV-}" ] && [ -f "${VIRTUAL_ENV}/bin/activate" ]; then
+    . "${VIRTUAL_ENV}/bin/activate"
+else
+    # other common virtualenvs
+    my_path=$(git rev-parse --show-toplevel)
+
+    for venv in venv .venv .; do
+        if [ -f "${my_path}/${venv}/bin/activate" ]; then
+            . "${my_path}/${venv}/bin/activate"
+            break
+        fi
+    done
+fi
+
+exec "$@"
@@ -1,27 +0,0 @@
-#!/bin/bash
-source "/etc/supervisor_scripts/common"
-
-set -e
-
-# Update frontend
-git submodule update --init --recursive --remote
-
-[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
-cd home-assistant-polymer
-nvm install
-script/bootstrap
-
-# Download translations
-./script/translations_download
-
-# build frontend
-cd hassio
-./script/build_hassio
-
-# Copy frontend
-rm -rf ../../supervisor/api/panel/*
-cp -rf build/* ../../supervisor/api/panel/
-
-# Reset frontend git
-cd ..
-git reset --hard HEAD
2 setup.py
@@ -19,7 +19,7 @@ def _get_supervisor_version():
     for line in CONSTANTS.split("/n"):
         if match := RE_SUPERVISOR_VERSION.match(line):
             return match.group(1)
-    return "99.9.9dev"
+    return "9999.09.9.dev9999"


 setup(
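A note on the new fallback value: Supervisor versions are CalVer strings compared via the awesomeversion package pinned in requirements.txt above. A hypothetical illustration (not repo code) of why `9999.09.9.dev9999` is a safer fallback than `99.9.9dev`:

    # Sketch, assuming awesomeversion's segment-wise comparison semantics.
    from awesomeversion import AwesomeVersion

    release = AwesomeVersion("2025.09.1")
    old_fallback = AwesomeVersion("99.9.9dev")
    new_fallback = AwesomeVersion("9999.09.9.dev9999")

    # The old fallback sorted below current CalVer releases (99 < 2025), so a
    # local dev build could be treated as outdated; the new one sorts above.
    print(old_fallback > release)   # False
    print(new_fallback > release)   # True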
@@ -11,10 +11,12 @@ import zlib_fast
 # Enable fast zlib before importing supervisor
 zlib_fast.enable()
 
-from supervisor import bootstrap  # pylint: disable=wrong-import-position # noqa: E402
-from supervisor.utils.logging import (  # pylint: disable=wrong-import-position # noqa: E402
-    activate_log_queue_handler,
-)
+# pylint: disable=wrong-import-position
+from supervisor import bootstrap  # noqa: E402
+from supervisor.utils.blockbuster import BlockBusterManager  # noqa: E402
+from supervisor.utils.logging import activate_log_queue_handler  # noqa: E402
+
+# pylint: enable=wrong-import-position
 
 _LOGGER: logging.Logger = logging.getLogger(__name__)
 
@@ -52,10 +54,11 @@ if __name__ == "__main__":
     _LOGGER.info("Initializing Supervisor setup")
     coresys = loop.run_until_complete(bootstrap.initialize_coresys())
     loop.set_debug(coresys.config.debug)
+    if coresys.config.detect_blocking_io:
+        BlockBusterManager.activate()
     loop.run_until_complete(coresys.core.connect())
 
-    bootstrap.supervisor_debugger(coresys)
-    bootstrap.migrate_system_env(coresys)
+    loop.run_until_complete(bootstrap.supervisor_debugger(coresys))
 
     # Signal health startup for container
     run_os_startup_check_cleanup()
@@ -63,8 +66,28 @@ if __name__ == "__main__":
     _LOGGER.info("Setting up Supervisor")
     loop.run_until_complete(coresys.core.setup())
 
-    loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
-    loop.call_soon_threadsafe(bootstrap.reg_signal, loop, coresys)
+    # Create startup task that can be cancelled gracefully
+    startup_task = loop.create_task(coresys.core.start())
+
+    def shutdown_handler() -> None:
+        """Handle shutdown signals gracefully during startup."""
+        if not startup_task.done():
+            _LOGGER.warning("Supervisor startup interrupted by shutdown signal")
+            startup_task.cancel()
+
+        coresys.create_task(coresys.core.stop())
+
+    bootstrap.register_signal_handlers(loop, shutdown_handler)
+
+    try:
+        loop.run_until_complete(startup_task)
+    except asyncio.CancelledError:
+        _LOGGER.warning("Supervisor startup cancelled")
+    except Exception as err:  # pylint: disable=broad-except
+        # Supervisor itself is running at this point, just something didn't
+        # start as expected. Log with traceback to get more insights for
+        # such cases.
+        _LOGGER.critical("Supervisor start failed: %s", err, exc_info=True)
 
     try:
         _LOGGER.info("Running Supervisor")
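The hunk above replaces the fire-and-forget startup with a cancellable task plus a signal handler. A minimal, self-contained sketch of the same asyncio pattern (assuming a Unix event loop; start_services is a hypothetical stand-in for coresys.core.start()):

    import asyncio
    import signal

    async def start_services() -> None:
        # Stand-in for a long-running startup sequence.
        await asyncio.sleep(60)

    def main() -> None:
        loop = asyncio.new_event_loop()
        startup_task = loop.create_task(start_services())

        def shutdown_handler() -> None:
            # Cancelling the task makes run_until_complete() raise
            # CancelledError instead of leaving startup uninterruptible.
            if not startup_task.done():
                startup_task.cancel()

        # add_signal_handler is only available on Unix event loops.
        for sig in (signal.SIGTERM, signal.SIGINT):
            loop.add_signal_handler(sig, shutdown_handler)

        try:
            loop.run_until_complete(startup_task)
        except asyncio.CancelledError:
            print("startup cancelled by signal")
        finally:
            loop.close()

    if __name__ == "__main__":
        main()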
@@ -6,6 +6,7 @@ from contextlib import suppress
 from copy import deepcopy
 from datetime import datetime
 import errno
+from functools import partial
 from ipaddress import IPv4Address
 import logging
 from pathlib import Path, PurePath
@@ -17,9 +18,9 @@ from tempfile import TemporaryDirectory
 from typing import Any, Final
 
 import aiohttp
-from awesomeversion import AwesomeVersionCompareException
+from awesomeversion import AwesomeVersion, AwesomeVersionCompareException
 from deepmerge import Merger
-from securetar import atomic_contents_add, secure_path
+from securetar import AddFileError, atomic_contents_add, secure_path
 import voluptuous as vol
 from voluptuous.humanize import humanize_error
 
@@ -32,8 +33,6 @@ from ..const import (
     ATTR_AUDIO_OUTPUT,
     ATTR_AUTO_UPDATE,
     ATTR_BOOT,
-    ATTR_DATA,
-    ATTR_EVENT,
     ATTR_IMAGE,
     ATTR_INGRESS_ENTRY,
     ATTR_INGRESS_PANEL,
@@ -49,7 +48,6 @@ from ..const import (
     ATTR_SYSTEM,
     ATTR_SYSTEM_MANAGED,
     ATTR_SYSTEM_MANAGED_CONFIG_ENTRY,
-    ATTR_TYPE,
     ATTR_USER,
     ATTR_UUID,
     ATTR_VERSION,
@@ -69,17 +67,17 @@ from ..docker.monitor import DockerContainerStateEvent
 from ..docker.stats import DockerStats
 from ..exceptions import (
     AddonConfigurationError,
+    AddonNotSupportedError,
     AddonsError,
     AddonsJobError,
-    AddonsNotSupportedError,
     ConfigurationFileError,
     DockerError,
     HomeAssistantAPIError,
     HostAppArmorError,
 )
 from ..hardware.data import Device
-from ..homeassistant.const import WSEvent, WSType
-from ..jobs.const import JobExecutionLimit
+from ..homeassistant.const import WSEvent
+from ..jobs.const import JobConcurrency, JobThrottle
 from ..jobs.decorator import Job
 from ..resolution.const import ContextType, IssueType, UnhealthyReason
 from ..resolution.data import Issue
@@ -87,7 +85,7 @@ from ..store.addon import AddonStore
 from ..utils import check_port
 from ..utils.apparmor import adjust_profile
 from ..utils.json import read_json_file, write_json_file
-from ..utils.sentry import capture_exception
+from ..utils.sentry import async_capture_exception
 from .const import (
     WATCHDOG_MAX_ATTEMPTS,
     WATCHDOG_RETRY_SECONDS,
@@ -139,9 +137,7 @@ class Addon(AddonModel):
         super().__init__(coresys, slug)
         self.instance: DockerAddon = DockerAddon(coresys, self)
         self._state: AddonState = AddonState.UNKNOWN
-        self._manual_stop: bool = (
-            self.sys_hardware.helper.last_boot != self.sys_config.last_boot
-        )
+        self._manual_stop: bool = False
         self._listeners: list[EventListener] = []
         self._startup_event = asyncio.Event()
         self._startup_task: asyncio.Task | None = None
@@ -197,15 +193,12 @@ class Addon(AddonModel):
         ):
             self.sys_resolution.dismiss_issue(self.device_access_missing_issue)
 
-        self.sys_homeassistant.websocket.send_message(
+        self.sys_homeassistant.websocket.supervisor_event_custom(
+            WSEvent.ADDON,
             {
-                ATTR_TYPE: WSType.SUPERVISOR_EVENT,
-                ATTR_DATA: {
-                    ATTR_EVENT: WSEvent.ADDON,
-                    ATTR_SLUG: self.slug,
-                    ATTR_STATE: new_state,
-                },
-            }
+                ATTR_SLUG: self.slug,
+                ATTR_STATE: new_state,
+            },
         )
 
     @property
@@ -215,6 +208,10 @@ class Addon(AddonModel):
 
     async def load(self) -> None:
         """Async initialize of object."""
+        self._manual_stop = (
+            await self.sys_hardware.helper.last_boot() != self.sys_config.last_boot
+        )
+
         if self.is_detached:
             await super().refresh_path_cache()
 
@@ -242,7 +239,7 @@ class Addon(AddonModel):
         await self.instance.install(self.version, default_image, arch=self.arch)
 
         self.persist[ATTR_IMAGE] = default_image
-        self.save_persist()
+        await self.save_persist()
 
     @property
     def ip_address(self) -> IPv4Address:
@@ -282,28 +279,28 @@ class Addon(AddonModel):
     @property
     def with_icon(self) -> bool:
         """Return True if an icon exists."""
-        if self.is_detached:
+        if self.is_detached or not self.addon_store:
             return super().with_icon
         return self.addon_store.with_icon
 
     @property
     def with_logo(self) -> bool:
         """Return True if a logo exists."""
-        if self.is_detached:
+        if self.is_detached or not self.addon_store:
             return super().with_logo
         return self.addon_store.with_logo
 
     @property
     def with_changelog(self) -> bool:
         """Return True if a changelog exists."""
-        if self.is_detached:
+        if self.is_detached or not self.addon_store:
             return super().with_changelog
         return self.addon_store.with_changelog
 
     @property
     def with_documentation(self) -> bool:
         """Return True if a documentation exists."""
-        if self.is_detached:
+        if self.is_detached or not self.addon_store:
             return super().with_documentation
         return self.addon_store.with_documentation
 
@@ -313,7 +310,7 @@ class Addon(AddonModel):
         return self._available(self.data_store)
 
     @property
-    def version(self) -> str | None:
+    def version(self) -> AwesomeVersion:
         """Return installed version."""
         return self.persist[ATTR_VERSION]
 
@@ -363,7 +360,7 @@ class Addon(AddonModel):
     @property
     def auto_update(self) -> bool:
         """Return if auto update is enable."""
-        return self.persist.get(ATTR_AUTO_UPDATE, super().auto_update)
+        return self.persist.get(ATTR_AUTO_UPDATE, False)
 
     @auto_update.setter
     def auto_update(self, value: bool) -> None:
@@ -461,7 +458,7 @@ class Addon(AddonModel):
         return None
 
     @property
-    def latest_version(self) -> str:
+    def latest_version(self) -> AwesomeVersion:
         """Return version of add-on."""
         return self.data_store[ATTR_VERSION]
 
@@ -515,9 +512,8 @@ class Addon(AddonModel):
     def webui(self) -> str | None:
         """Return URL to webui or None."""
         url = super().webui
-        if not url:
+        if not url or not (webui := RE_WEBUI.match(url)):
             return None
-        webui = RE_WEBUI.match(url)
 
         # extract arguments
         t_port = webui.group("t_port")
@@ -666,16 +662,15 @@ class Addon(AddonModel):
         """Is add-on loaded."""
         return bool(self._listeners)
 
-    def save_persist(self) -> None:
+    async def save_persist(self) -> None:
         """Save data of add-on."""
-        self.sys_addons.data.save_data()
+        await self.sys_addons.data.save_data()
 
     async def watchdog_application(self) -> bool:
         """Return True if application is running."""
-        url = super().watchdog
-        if not url:
+        url = self.watchdog_url
+        if not url or not (application := RE_WATCHDOG.match(url)):
             return True
-        application = RE_WATCHDOG.match(url)
 
         # extract arguments
         t_port = int(application.group("t_port"))
@@ -684,8 +679,10 @@ class Addon(AddonModel):
         s_suffix = application.group("s_suffix") or ""
 
         # search host port for this docker port
-        if self.host_network:
-            port = self.ports.get(f"{t_port}/tcp", t_port)
+        if self.host_network and self.ports:
+            port = self.ports.get(f"{t_port}/tcp")
+            if port is None:
+                port = t_port
         else:
             port = t_port
 
@@ -719,7 +716,7 @@ class Addon(AddonModel):
 
         try:
             options = self.schema.validate(self.options)
-            write_json_file(self.path_options, options)
+            await self.sys_run_in_executor(write_json_file, self.path_options, options)
        except vol.Invalid as ex:
             _LOGGER.error(
                 "Add-on %s has invalid options: %s",
@@ -736,8 +733,8 @@ class Addon(AddonModel):
 
     @Job(
         name="addon_unload",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def unload(self) -> None:
         """Unload add-on and remove data."""
@@ -750,9 +747,12 @@ class Addon(AddonModel):
         for listener in self._listeners:
             self.sys_bus.remove_listener(listener)
 
-        if self.path_data.is_dir():
-            _LOGGER.info("Removing add-on data folder %s", self.path_data)
-            await remove_data(self.path_data)
+        def remove_data_dir():
+            if self.path_data.is_dir():
+                _LOGGER.info("Removing add-on data folder %s", self.path_data)
+                remove_data(self.path_data)
+
+        await self.sys_run_in_executor(remove_data_dir)
 
     async def _check_ingress_port(self):
         """Assign a ingress port if dynamic port selection is used."""
@@ -766,19 +766,25 @@ class Addon(AddonModel):
 
     @Job(
         name="addon_install",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def install(self) -> None:
         """Install and setup this addon."""
-        self.sys_addons.data.install(self.addon_store)
+        if not self.addon_store:
+            raise AddonsError("Missing from store, cannot install!")
+
+        await self.sys_addons.data.install(self.addon_store)
         await self.load()
 
-        if not self.path_data.is_dir():
-            _LOGGER.info(
-                "Creating Home Assistant add-on data folder %s", self.path_data
-            )
-            self.path_data.mkdir()
+        def setup_data():
+            if not self.path_data.is_dir():
+                _LOGGER.info(
+                    "Creating Home Assistant add-on data folder %s", self.path_data
+                )
+                self.path_data.mkdir()
+
+        await self.sys_run_in_executor(setup_data)
 
         # Setup/Fix AppArmor profile
         await self.install_apparmor()
@@ -789,7 +795,7 @@ class Addon(AddonModel):
                 self.latest_version, self.addon_store.image, arch=self.arch
             )
         except DockerError as err:
-            self.sys_addons.data.uninstall(self)
+            await self.sys_addons.data.uninstall(self)
             raise AddonsError() from err
 
         # Add to addon manager
@@ -801,8 +807,8 @@ class Addon(AddonModel):
 
     @Job(
         name="addon_uninstall",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def uninstall(
         self, *, remove_config: bool, remove_image: bool = True
@@ -817,14 +823,17 @@ class Addon(AddonModel):
 
         await self.unload()
 
-        # Remove config if present and requested
-        if self.addon_config_used and remove_config:
-            await remove_data(self.path_config)
+        def cleanup_config_and_audio():
+            # Remove config if present and requested
+            if self.addon_config_used and remove_config:
+                remove_data(self.path_config)
 
-        # Cleanup audio settings
-        if self.path_pulse.exists():
-            with suppress(OSError):
-                self.path_pulse.unlink()
+            # Cleanup audio settings
+            if self.path_pulse.exists():
+                with suppress(OSError):
+                    self.path_pulse.unlink()
+
+        await self.sys_run_in_executor(cleanup_config_and_audio)
 
         # Cleanup AppArmor profile
         with suppress(HostAppArmorError):
@@ -837,30 +846,35 @@ class Addon(AddonModel):
             await self.sys_ingress.update_hass_panel(self)
 
         # Cleanup Ingress dynamic port assignment
+        need_ingress_token_cleanup = False
         if self.with_ingress:
-            self.sys_create_task(self.sys_ingress.reload())
-            self.sys_ingress.del_dynamic_port(self.slug)
+            need_ingress_token_cleanup = True
+            await self.sys_ingress.del_dynamic_port(self.slug)
 
         # Cleanup discovery data
         for message in self.sys_discovery.list_messages:
             if message.addon != self.slug:
                 continue
-            self.sys_discovery.remove(message)
+            await self.sys_discovery.remove(message)
 
         # Cleanup services data
         for service in self.sys_services.list_services:
             if self.slug not in service.active:
                 continue
-            service.del_service_data(self)
+            await service.del_service_data(self)
 
         # Remove from addon manager
-        self.sys_addons.data.uninstall(self)
         self.sys_addons.local.pop(self.slug)
+        await self.sys_addons.data.uninstall(self)
+
+        # Cleanup Ingress tokens
+        if need_ingress_token_cleanup:
+            await self.sys_ingress.reload()
 
     @Job(
         name="addon_update",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def update(self) -> asyncio.Task | None:
         """Update this addon to latest version.
@@ -868,6 +882,9 @@ class Addon(AddonModel):
         Returns a Task that completes when addon has state 'started' (see start)
         if it was running. Else nothing is returned.
         """
+        if not self.addon_store:
+            raise AddonsError("Missing from store, cannot update!")
+
         old_image = self.image
         # Cache data to prevent races with other updates to global
         store = self.addon_store.clone()
@@ -883,7 +900,7 @@ class Addon(AddonModel):
 
         try:
             _LOGGER.info("Add-on '%s' successfully updated", self.slug)
-            self.sys_addons.data.update(store)
+            await self.sys_addons.data.update(store)
             await self._check_ingress_port()
 
             # Cleanup
@@ -906,8 +923,8 @@ class Addon(AddonModel):
 
     @Job(
         name="addon_rebuild",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def rebuild(self) -> asyncio.Task | None:
         """Rebuild this addons container and image.
@@ -924,7 +941,9 @@ class Addon(AddonModel):
         except DockerError as err:
             raise AddonsError() from err
 
-        self.sys_addons.data.update(self.addon_store)
+        if self.addon_store:
+            await self.sys_addons.data.update(self.addon_store)
+
         await self._check_ingress_port()
         _LOGGER.info("Add-on '%s' successfully rebuilt", self.slug)
 
@@ -937,22 +956,25 @@ class Addon(AddonModel):
         )
         return out
 
-    def write_pulse(self) -> None:
+    async def write_pulse(self) -> None:
         """Write asound config to file and return True on success."""
         pulse_config = self.sys_plugins.audio.pulse_client(
             input_profile=self.audio_input, output_profile=self.audio_output
         )
 
-        # Cleanup wrong maps
-        if self.path_pulse.is_dir():
-            shutil.rmtree(self.path_pulse, ignore_errors=True)
+        def write_pulse_config():
+            # Cleanup wrong maps
+            if self.path_pulse.is_dir():
+                shutil.rmtree(self.path_pulse, ignore_errors=True)
+            self.path_pulse.write_text(pulse_config, encoding="utf-8")
 
-        # Write pulse config
         try:
-            self.path_pulse.write_text(pulse_config, encoding="utf-8")
+            await self.sys_run_in_executor(write_pulse_config)
         except OSError as err:
             if err.errno == errno.EBADMSG:
-                self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
+                self.sys_resolution.add_unhealthy_reason(
+                    UnhealthyReason.OSERROR_BAD_MESSAGE
+                )
             _LOGGER.error(
                 "Add-on %s can't write pulse/client.config: %s", self.slug, err
             )
@@ -964,7 +986,7 @@ class Addon(AddonModel):
    async def install_apparmor(self) -> None:
         """Install or Update AppArmor profile for Add-on."""
         exists_local = self.sys_host.apparmor.exists(self.slug)
-        exists_addon = self.path_apparmor.exists()
+        exists_addon = await self.sys_run_in_executor(self.path_apparmor.exists)
 
         # Nothing to do
         if not exists_local and not exists_addon:
@@ -976,11 +998,21 @@ class Addon(AddonModel):
             return
 
         # Need install/update
-        with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder:
-            profile_file = Path(tmp_folder, "apparmor.txt")
+        tmp_folder: TemporaryDirectory | None = None
+
+        def install_update_profile() -> Path:
+            nonlocal tmp_folder
+            tmp_folder = TemporaryDirectory(dir=self.sys_config.path_tmp)
+            profile_file = Path(tmp_folder.name, "apparmor.txt")
             adjust_profile(self.slug, self.path_apparmor, profile_file)
+            return profile_file
+
+        try:
+            profile_file = await self.sys_run_in_executor(install_update_profile)
             await self.sys_host.apparmor.load_profile(self.slug, profile_file)
+        finally:
+            if tmp_folder:
+                await self.sys_run_in_executor(tmp_folder.cleanup)
 
     async def uninstall_apparmor(self) -> None:
         """Remove AppArmor profile for Add-on."""
@@ -1036,8 +1068,8 @@ class Addon(AddonModel):
 
     @Job(
         name="addon_start",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def start(self) -> asyncio.Task:
         """Set options and start add-on.
@@ -1052,14 +1084,14 @@ class Addon(AddonModel):
 
         # Access Token
         self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
-        self.save_persist()
+        await self.save_persist()
 
         # Options
         await self.write_options()
 
         # Sound
         if self.with_audio:
-            self.write_pulse()
+            await self.write_pulse()
 
         def _check_addon_config_dir():
             if self.path_config.is_dir():
@@ -1085,8 +1117,8 @@ class Addon(AddonModel):
 
     @Job(
         name="addon_stop",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def stop(self) -> None:
         """Stop add-on."""
@@ -1099,8 +1131,8 @@ class Addon(AddonModel):
 
     @Job(
         name="addon_restart",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def restart(self) -> asyncio.Task:
         """Restart add-on.
@@ -1134,13 +1166,13 @@ class Addon(AddonModel):
 
     @Job(
         name="addon_write_stdin",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def write_stdin(self, data) -> None:
         """Write data to add-on stdin."""
         if not self.with_stdin:
-            raise AddonsNotSupportedError(
+            raise AddonNotSupportedError(
                 f"Add-on {self.slug} does not support writing to stdin!", _LOGGER.error
             )
 
@@ -1168,8 +1200,8 @@ class Addon(AddonModel):
 
     @Job(
         name="addon_begin_backup",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def begin_backup(self) -> bool:
         """Execute pre commands or stop addon if necessary.
@@ -1190,8 +1222,8 @@ class Addon(AddonModel):
 
     @Job(
         name="addon_end_backup",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def end_backup(self) -> asyncio.Task | None:
         """Execute post commands or restart addon if necessary.
@@ -1207,10 +1239,29 @@ class Addon(AddonModel):
             await self._backup_command(self.backup_post)
         return None
 
+    def _is_excluded_by_filter(
+        self, origin_path: Path, arcname: str, item_arcpath: PurePath
+    ) -> bool:
+        """Filter out files from backup based on filters provided by addon developer.
+
+        This tests the dev provided filters against the full path of the file as
+        Supervisor sees them using match. This is done for legacy reasons, testing
+        against the relative path makes more sense and may be changed in the future.
+        """
+        full_path = origin_path / item_arcpath.relative_to(arcname)
+
+        for exclude in self.backup_exclude:
+            if not full_path.match(exclude):
+                continue
+            _LOGGER.debug("Ignoring %s because of %s", full_path, exclude)
+            return True
+
+        return False
+
     @Job(
         name="addon_backup",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def backup(self, tar_file: tarfile.TarFile) -> asyncio.Task | None:
         """Backup state of an add-on.
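The new `_is_excluded_by_filter` helper above replaces securetar's `excludes=` handling with an explicit file filter. A self-contained sketch of the `Path.match` semantics it relies on (hypothetical paths and patterns, not taken from the repo):

    from pathlib import Path, PurePath

    backup_exclude = ["*.db-shm", "cache/*"]
    origin_path = Path("/data/addons/data/my_addon")

    def is_excluded(item_arcpath: PurePath, arcname: str = "data") -> bool:
        # Rebuild the full path as Supervisor sees it, then match patterns
        # against it from the right, exactly as Path.match does.
        full_path = origin_path / item_arcpath.relative_to(arcname)
        return any(full_path.match(pattern) for pattern in backup_exclude)

    print(is_excluded(PurePath("data/home-assistant_v2.db-shm")))  # True
    print(is_excluded(PurePath("data/cache/tile.png")))            # True
    print(is_excluded(PurePath("data/config.json")))               # False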
@@ -1218,46 +1269,45 @@ class Addon(AddonModel):
         Returns a Task that completes when addon has state 'started' (see start)
         for cold backup. Else nothing is returned.
         """
-        wait_for_start: Awaitable[None] | None = None
-
-        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
-            temp_path = Path(temp)
+        def _addon_backup(
+            store_image: bool,
+            metadata: dict[str, Any],
+            apparmor_profile: str | None,
+            addon_config_used: bool,
+        ):
+            """Start the backup process."""
+            with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
+                temp_path = Path(temp)
 
-            # store local image
-            if self.need_build:
+                # store local image
+                if store_image:
                     try:
-                        await self.instance.export_image(temp_path.joinpath("image.tar"))
+                        self.instance.export_image(temp_path.joinpath("image.tar"))
                     except DockerError as err:
                         raise AddonsError() from err
 
-            data = {
-                ATTR_USER: self.persist,
-                ATTR_SYSTEM: self.data,
-                ATTR_VERSION: self.version,
-                ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
-            }
-
-            # Store local configs/state
-            try:
-                write_json_file(temp_path.joinpath("addon.json"), data)
-            except ConfigurationFileError as err:
-                raise AddonsError(
-                    f"Can't save meta for {self.slug}", _LOGGER.error
-                ) from err
-
-            # Store AppArmor Profile
-            if self.sys_host.apparmor.exists(self.slug):
-                profile = temp_path.joinpath("apparmor.txt")
-                try:
-                    await self.sys_host.apparmor.backup_profile(self.slug, profile)
-                except HostAppArmorError as err:
-                    raise AddonsError(
-                        "Can't backup AppArmor profile", _LOGGER.error
-                    ) from err
-
-            # write into tarfile
-            def _write_tarfile():
-                """Write tar inside loop."""
+                # Store local configs/state
+                try:
+                    write_json_file(temp_path.joinpath("addon.json"), metadata)
+                except ConfigurationFileError as err:
+                    raise AddonsError(
+                        f"Can't save meta for {self.slug}", _LOGGER.error
+                    ) from err
+
+                # Store AppArmor Profile
+                if apparmor_profile:
+                    profile_backup_file = temp_path.joinpath("apparmor.txt")
+                    try:
+                        self.sys_host.apparmor.backup_profile(
+                            apparmor_profile, profile_backup_file
+                        )
+                    except HostAppArmorError as err:
+                        raise AddonsError(
+                            "Can't backup AppArmor profile", _LOGGER.error
+                        ) from err
+
+                # Write tarfile
                 with tar_file as backup:
                     # Backup metadata
                     backup.add(temp, arcname=".")
@@ -1266,38 +1316,60 @@ class Addon(AddonModel):
                     atomic_contents_add(
                         backup,
                         self.path_data,
-                        excludes=self.backup_exclude,
+                        file_filter=partial(
+                            self._is_excluded_by_filter, self.path_data, "data"
+                        ),
                         arcname="data",
                     )
 
-                    # Backup config
-                    if self.addon_config_used:
+                    # Backup config (if used and existing, restore handles this gracefully)
+                    if addon_config_used and self.path_config.is_dir():
                         atomic_contents_add(
                             backup,
                             self.path_config,
-                            excludes=self.backup_exclude,
+                            file_filter=partial(
+                                self._is_excluded_by_filter, self.path_config, "config"
+                            ),
                             arcname="config",
                         )
 
-            is_running = await self.begin_backup()
-            try:
-                _LOGGER.info("Building backup for add-on %s", self.slug)
-                await self.sys_run_in_executor(_write_tarfile)
-            except (tarfile.TarError, OSError) as err:
-                raise AddonsError(
-                    f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
-                ) from err
-            finally:
-                if is_running:
-                    wait_for_start = await self.end_backup()
+        wait_for_start: asyncio.Task | None = None
+
+        data = {
+            ATTR_USER: self.persist,
+            ATTR_SYSTEM: self.data,
+            ATTR_VERSION: self.version,
+            ATTR_STATE: _MAP_ADDON_STATE.get(self.state, self.state),
+        }
+        apparmor_profile = (
+            self.slug if self.sys_host.apparmor.exists(self.slug) else None
+        )
+
+        was_running = await self.begin_backup()
+        try:
+            _LOGGER.info("Building backup for add-on %s", self.slug)
+            await self.sys_run_in_executor(
+                partial(
+                    _addon_backup,
+                    store_image=self.need_build,
+                    metadata=data,
+                    apparmor_profile=apparmor_profile,
+                    addon_config_used=self.addon_config_used,
+                )
+            )
+            _LOGGER.info("Finish backup for addon %s", self.slug)
+        except (tarfile.TarError, OSError, AddFileError) as err:
+            raise AddonsError(f"Can't write tarfile: {err}", _LOGGER.error) from err
+        finally:
+            if was_running:
+                wait_for_start = await self.end_backup()
 
-        _LOGGER.info("Finish backup for addon %s", self.slug)
         return wait_for_start
 
     @Job(
         name="addon_restore",
-        limit=JobExecutionLimit.GROUP_ONCE,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.GROUP_REJECT,
     )
     async def restore(self, tar_file: tarfile.TarFile) -> asyncio.Task | None:
         """Restore state of an add-on.
@@ -1305,31 +1377,37 @@ class Addon(AddonModel):
         Returns a Task that completes when addon has state 'started' (see start)
         if addon is started after restore. Else nothing is returned.
         """
-        wait_for_start: Awaitable[None] | None = None
-        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
-            # extract backup
-            def _extract_tarfile():
-                """Extract tar backup."""
+        wait_for_start: asyncio.Task | None = None
+
+        # Extract backup
+        def _extract_tarfile() -> tuple[TemporaryDirectory, dict[str, Any]]:
+            """Extract tar backup."""
+            tmp = TemporaryDirectory(dir=self.sys_config.path_tmp)
+            try:
                 with tar_file as backup:
                     backup.extractall(
-                        path=Path(temp),
+                        path=tmp.name,
                         members=secure_path(backup),
                         filter="fully_trusted",
                     )
 
-            try:
-                await self.sys_run_in_executor(_extract_tarfile)
-            except tarfile.TarError as err:
-                raise AddonsError(
-                    f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
-                ) from err
+                data = read_json_file(Path(tmp.name, "addon.json"))
+            except:
+                tmp.cleanup()
+                raise
 
-            # Read backup data
-            try:
-                data = read_json_file(Path(temp, "addon.json"))
-            except ConfigurationFileError as err:
-                raise AddonsError() from err
+            return tmp, data
+
+        try:
+            tmp, data = await self.sys_run_in_executor(_extract_tarfile)
+        except tarfile.TarError as err:
+            raise AddonsError(
+                f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
+            ) from err
+        except ConfigurationFileError as err:
+            raise AddonsError() from err
 
+        try:
             # Validate
             try:
                 data = SCHEMA_ADDON_BACKUP(data)
@@ -1341,7 +1419,7 @@ class Addon(AddonModel):
 
             # If available
             if not self._available(data[ATTR_SYSTEM]):
-                raise AddonsNotSupportedError(
+                raise AddonNotSupportedError(
                     f"Add-on {self.slug} is not available for this platform",
                     _LOGGER.error,
                 )
@@ -1349,7 +1427,7 @@ class Addon(AddonModel):
             # Restore local add-on information
             _LOGGER.info("Restore config for addon %s", self.slug)
             restore_image = self._image(data[ATTR_SYSTEM])
-            self.sys_addons.data.restore(
+            await self.sys_addons.data.restore(
                 self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image
             )
 
@@ -1363,7 +1441,7 @@ class Addon(AddonModel):
             if not await self.instance.exists():
                 _LOGGER.info("Restore/Install of image for addon %s", self.slug)
 
-                image_file = Path(temp, "image.tar")
+                image_file = Path(tmp.name, "image.tar")
                 if image_file.is_file():
                     with suppress(DockerError):
                         await self.instance.import_image(image_file)
@@ -1382,24 +1460,24 @@ class Addon(AddonModel):
             # Restore data and config
             def _restore_data():
                 """Restore data and config."""
-                temp_data = Path(temp, "data")
+                _LOGGER.info("Restoring data and config for addon %s", self.slug)
+                if self.path_data.is_dir():
+                    remove_data(self.path_data)
+                if self.path_config.is_dir():
+                    remove_data(self.path_config)
+
+                temp_data = Path(tmp.name, "data")
                 if temp_data.is_dir():
                     shutil.copytree(temp_data, self.path_data, symlinks=True)
                 else:
                     self.path_data.mkdir()
 
-                temp_config = Path(temp, "config")
+                temp_config = Path(tmp.name, "config")
                 if temp_config.is_dir():
                     shutil.copytree(temp_config, self.path_config, symlinks=True)
                 elif self.addon_config_used:
                     self.path_config.mkdir()
 
-            _LOGGER.info("Restoring data and config for addon %s", self.slug)
-            if self.path_data.is_dir():
-                await remove_data(self.path_data)
-            if self.path_config.is_dir():
-                await remove_data(self.path_config)
-
             try:
                 await self.sys_run_in_executor(_restore_data)
             except shutil.Error as err:
@@ -1408,15 +1486,16 @@ class Addon(AddonModel):
                 ) from err
 
             # Restore AppArmor
-            profile_file = Path(temp, "apparmor.txt")
-            if profile_file.exists():
+            profile_file = Path(tmp.name, "apparmor.txt")
+            if await self.sys_run_in_executor(profile_file.exists):
                 try:
                     await self.sys_host.apparmor.load_profile(
                         self.slug, profile_file
                     )
                 except HostAppArmorError as err:
                     _LOGGER.error(
-                        "Can't restore AppArmor profile for add-on %s", self.slug
+                        "Can't restore AppArmor profile for add-on %s",
+                        self.slug,
                     )
                     raise AddonsError() from err
 
@@ -1428,7 +1507,8 @@ class Addon(AddonModel):
             # Run add-on
             if data[ATTR_STATE] == AddonState.STARTED:
                 wait_for_start = await self.start()
-
+        finally:
+            await self.sys_run_in_executor(tmp.cleanup)
         _LOGGER.info("Finished restore for add-on %s", self.slug)
         return wait_for_start
 
@@ -1441,10 +1521,10 @@ class Addon(AddonModel):
 
     @Job(
         name="addon_restart_after_problem",
-        limit=JobExecutionLimit.GROUP_THROTTLE_RATE_LIMIT,
         throttle_period=WATCHDOG_THROTTLE_PERIOD,
         throttle_max_calls=WATCHDOG_THROTTLE_MAX_CALLS,
         on_condition=AddonsJobError,
+        throttle=JobThrottle.GROUP_RATE_LIMIT,
     )
     async def _restart_after_problem(self, state: ContainerState):
         """Restart unhealthy or failed addon."""
@@ -1469,7 +1549,7 @@ class Addon(AddonModel):
             except AddonsError as err:
                 attempts = attempts + 1
                 _LOGGER.error("Watchdog restart of addon %s failed!", self.name)
-                capture_exception(err)
+                await async_capture_exception(err)
             else:
                 break
 
@@ -1521,6 +1601,6 @@ class Addon(AddonModel):
 
     def refresh_path_cache(self) -> Awaitable[None]:
         """Refresh cache of existing paths."""
-        if self.is_detached:
+        if self.is_detached or not self.addon_store:
             return super().refresh_path_cache()
         return self.addon_store.refresh_path_cache()
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 from functools import cached_property
 from pathlib import Path
-from typing import TYPE_CHECKING
+from typing import TYPE_CHECKING, Any
 
 from awesomeversion import AwesomeVersion
 
@@ -15,6 +15,7 @@ from ..const import (
     ATTR_SQUASH,
     FILE_SUFFIX_CONFIGURATION,
     META_ADDON,
+    SOCKET_DOCKER,
 )
 from ..coresys import CoreSys, CoreSysAttributes
 from ..docker.interface import MAP_ARCH
@@ -23,7 +24,7 @@ from ..utils.common import FileConfiguration, find_one_filetype
 from .validate import SCHEMA_BUILD_CONFIG
 
 if TYPE_CHECKING:
-    from . import AnyAddon
+    from .manager import AnyAddon
 
 
 class AddonBuild(FileConfiguration, CoreSysAttributes):
@@ -34,23 +35,36 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         self.coresys: CoreSys = coresys
         self.addon = addon
 
+        # Search for build file later in executor
+        super().__init__(None, SCHEMA_BUILD_CONFIG)
+
+    def _get_build_file(self) -> Path:
+        """Get build file.
+
+        Must be run in executor.
+        """
         try:
-            build_file = find_one_filetype(
+            return find_one_filetype(
                 self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
             )
         except ConfigurationFileError:
-            build_file = self.addon.path_location / "build.json"
+            return self.addon.path_location / "build.json"
 
-        super().__init__(build_file, SCHEMA_BUILD_CONFIG)
+    async def read_data(self) -> None:
+        """Load data from file."""
+        if not self._file:
+            self._file = await self.sys_run_in_executor(self._get_build_file)
 
-    def save_data(self):
+        await super().read_data()
+
+    async def save_data(self):
         """Ignore save function."""
         raise RuntimeError()
 
     @cached_property
     def arch(self) -> str:
         """Return arch of the add-on."""
-        return self.sys_arch.match(self.addon.arch)
+        return self.sys_arch.match([self.addon.arch])
 
     @property
     def base_image(self) -> str:
@@ -68,13 +82,6 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         )
         return self._data[ATTR_BUILD_FROM][self.arch]
 
-    @property
-    def dockerfile(self) -> Path:
-        """Return Dockerfile path."""
-        if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
-            return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
-        return self.addon.path_location.joinpath("Dockerfile")
-
     @property
     def squash(self) -> bool:
         """Return True or False if squash is active."""
@@ -90,49 +97,89 @@ class AddonBuild(FileConfiguration, CoreSysAttributes):
         """Return additional Docker labels."""
         return self._data[ATTR_LABELS]
 
-    @property
-    def is_valid(self) -> bool:
+    def get_dockerfile(self) -> Path:
+        """Return Dockerfile path.
+
+        Must be run in executor.
+        """
+        if self.addon.path_location.joinpath(f"Dockerfile.{self.arch}").exists():
+            return self.addon.path_location.joinpath(f"Dockerfile.{self.arch}")
+        return self.addon.path_location.joinpath("Dockerfile")
+
+    async def is_valid(self) -> bool:
         """Return true if the build env is valid."""
-        try:
+
+        def build_is_valid() -> bool:
             return all(
                 [
                     self.addon.path_location.is_dir(),
-                    self.dockerfile.is_file(),
+                    self.get_dockerfile().is_file(),
                 ]
             )
+
+        try:
+            return await self.sys_run_in_executor(build_is_valid)
         except HassioArchNotFound:
             return False
 
-    def get_docker_args(self, version: AwesomeVersion, image: str | None = None):
-        """Create a dict with Docker build arguments."""
-        args = {
-            "path": str(self.addon.path_location),
-            "tag": f"{image or self.addon.image}:{version!s}",
-            "dockerfile": str(self.dockerfile),
-            "pull": True,
-            "forcerm": not self.sys_dev,
-            "squash": self.squash,
-            "platform": MAP_ARCH[self.arch],
-            "labels": {
-                "io.hass.version": version,
-                "io.hass.arch": self.arch,
-                "io.hass.type": META_ADDON,
-                "io.hass.name": self._fix_label("name"),
-                "io.hass.description": self._fix_label("description"),
-                **self.additional_labels,
-            },
-            "buildargs": {
-                "BUILD_FROM": self.base_image,
-                "BUILD_VERSION": version,
-                "BUILD_ARCH": self.sys_arch.default,
-                **self.additional_args,
-            },
+    def get_docker_args(
+        self, version: AwesomeVersion, image_tag: str
+    ) -> dict[str, Any]:
+        """Create a dict with Docker run args."""
+        dockerfile_path = self.get_dockerfile().relative_to(self.addon.path_location)
+
+        build_cmd = [
+            "docker",
+            "buildx",
+            "build",
+            ".",
+            "--tag",
+            image_tag,
+            "--file",
+            str(dockerfile_path),
+            "--platform",
+            MAP_ARCH[self.arch],
+            "--pull",
+        ]
+
+        labels = {
+            "io.hass.version": version,
+            "io.hass.arch": self.arch,
+            "io.hass.type": META_ADDON,
+            "io.hass.name": self._fix_label("name"),
+            "io.hass.description": self._fix_label("description"),
+            **self.additional_labels,
         }
 
         if self.addon.url:
-            args["labels"]["io.hass.url"] = self.addon.url
+            labels["io.hass.url"] = self.addon.url
 
-        return args
+        for key, value in labels.items():
+            build_cmd.extend(["--label", f"{key}={value}"])
+
+        build_args = {
+            "BUILD_FROM": self.base_image,
+            "BUILD_VERSION": version,
+            "BUILD_ARCH": self.sys_arch.default,
+            **self.additional_args,
+        }
+
+        for key, value in build_args.items():
+            build_cmd.extend(["--build-arg", f"{key}={value}"])
+
+        # The addon path will be mounted from the host system
+        addon_extern_path = self.sys_config.local_to_extern_path(
+            self.addon.path_location
+        )
+
+        return {
+            "command": build_cmd,
+            "volumes": {
+                SOCKET_DOCKER: {"bind": "/var/run/docker.sock", "mode": "rw"},
+                addon_extern_path: {"bind": "/addon", "mode": "ro"},
+            },
+            "working_dir": "/addon",
+        }
 
     def _fix_label(self, label_name: str) -> str:
         """Remove characters they are not supported."""
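The rewritten get_docker_args no longer returns docker-py image-build kwargs; it returns the pieces needed to run `docker buildx build` inside a helper container with the Docker socket mounted. A hypothetical consumer sketch (the caller and builder image are assumptions, not shown in this diff); the returned keys line up with docker-py's containers.run() keyword arguments:

    # Assumed consumer, not Supervisor code.
    import docker

    def run_addon_build(build, version, image_tag: str) -> None:
        client = docker.from_env()
        args = build.get_docker_args(version, image_tag)

        # Runs `docker buildx build ...` in a container that sees the host
        # Docker socket read-write and the add-on sources read-only at /addon.
        client.containers.run(
            "docker:cli",  # assumed builder image with the docker CLI
            command=args["command"],
            volumes=args["volumes"],
            working_dir=args["working_dir"],
            remove=True,
        )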
@@ -38,7 +38,7 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
         """Return local add-on data."""
         return self._data[ATTR_SYSTEM]
 
-    def install(self, addon: AddonStore) -> None:
+    async def install(self, addon: AddonStore) -> None:
         """Set addon as installed."""
         self.system[addon.slug] = deepcopy(addon.data)
         self.user[addon.slug] = {
@@ -46,26 +46,28 @@ class AddonsData(FileConfiguration, CoreSysAttributes):
             ATTR_VERSION: addon.version,
             ATTR_IMAGE: addon.image,
         }
-        self.save_data()
+        await self.save_data()
 
-    def uninstall(self, addon: Addon) -> None:
+    async def uninstall(self, addon: Addon) -> None:
         """Set add-on as uninstalled."""
         self.system.pop(addon.slug, None)
         self.user.pop(addon.slug, None)
-        self.save_data()
+        await self.save_data()
 
-    def update(self, addon: AddonStore) -> None:
+    async def update(self, addon: AddonStore) -> None:
         """Update version of add-on."""
         self.system[addon.slug] = deepcopy(addon.data)
         self.user[addon.slug].update(
             {ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
         )
-        self.save_data()
+        await self.save_data()
 
-    def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
+    async def restore(
+        self, slug: str, user: Config, system: Config, image: str
+    ) -> None:
         """Restore data to add-on."""
         self.user[slug] = deepcopy(user)
         self.system[slug] = deepcopy(system)
 
         self.user[slug][ATTR_IMAGE] = image
-        self.save_data()
+        await self.save_data()
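All of these mutators now await save_data(). A minimal sketch (assumptions, not Supervisor code) of the underlying pattern: the blocking JSON write moves off the event loop into a thread executor.

    import asyncio
    import json
    from pathlib import Path

    class FileBackedConfig:
        def __init__(self, path: Path) -> None:
            self._path = path
            self._data: dict = {}

        async def save_data(self) -> None:
            loop = asyncio.get_running_loop()
            # json.dumps + write_text block, so run them in the default executor
            # instead of directly on the event loop.
            await loop.run_in_executor(
                None, self._path.write_text, json.dumps(self._data)
            )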
@@ -5,16 +5,18 @@ from collections.abc import Awaitable
 from contextlib import suppress
 import logging
 import tarfile
-from typing import Union
+from typing import Self, Union
 
 from attr import evolve
 
+from supervisor.jobs.const import JobConcurrency
+
 from ..const import AddonBoot, AddonStartup, AddonState
 from ..coresys import CoreSys, CoreSysAttributes
 from ..exceptions import (
+    AddonNotSupportedError,
     AddonsError,
     AddonsJobError,
-    AddonsNotSupportedError,
     CoreDNSError,
     DockerError,
     HassioError,
@@ -23,7 +25,7 @@ from ..exceptions import (
 from ..jobs.decorator import Job, JobCondition
 from ..resolution.const import ContextType, IssueType, SuggestionType
 from ..store.addon import AddonStore
-from ..utils.sentry import capture_exception
+from ..utils.sentry import async_capture_exception
 from .addon import Addon
 from .const import ADDON_UPDATE_CONDITIONS
 from .data import AddonsData
@@ -67,6 +69,10 @@ class AddonManager(CoreSysAttributes):
             return self.store.get(addon_slug)
         return None
 
+    def get_local_only(self, addon_slug: str) -> Addon | None:
+        """Return an installed add-on from slug."""
+        return self.local.get(addon_slug)
+
     def from_token(self, token: str) -> Addon | None:
         """Return an add-on from Supervisor token."""
         for addon in self.installed:
@@ -74,6 +80,11 @@ class AddonManager(CoreSysAttributes):
                 return addon
         return None
 
+    async def load_config(self) -> Self:
+        """Load config in executor."""
+        await self.data.read_data()
+        return self
+
     async def load(self) -> None:
         """Start up add-on management."""
         # Refresh cache for all store addons
@@ -165,14 +176,17 @@ class AddonManager(CoreSysAttributes):
                 await addon.stop()
             except Exception as err:  # pylint: disable=broad-except
                 _LOGGER.warning("Can't stop Add-on %s: %s", addon.slug, err)
-                capture_exception(err)
+                await async_capture_exception(err)
 
     @Job(
         name="addon_manager_install",
         conditions=ADDON_UPDATE_CONDITIONS,
         on_condition=AddonsJobError,
+        concurrency=JobConcurrency.QUEUE,
     )
-    async def install(self, slug: str) -> None:
+    async def install(
+        self, slug: str, *, validation_complete: asyncio.Event | None = None
+    ) -> None:
         """Install an add-on."""
         self.sys_jobs.current.reference = slug
 
@@ -185,10 +199,15 @@ class AddonManager(CoreSysAttributes):
 
         store.validate_availability()
 
+        # If being run in the background, notify caller that validation has completed
+        if validation_complete:
+            validation_complete.set()
+
         await Addon(self.coresys, slug).install()
 
         _LOGGER.info("Add-on '%s' successfully installed", slug)
@Job(name="addon_manager_uninstall")
|
||||
async def uninstall(self, slug: str, *, remove_config: bool = False) -> None:
|
||||
"""Remove an add-on."""
|
||||
if slug not in self.local:
|
||||
@@ -213,7 +232,11 @@ class AddonManager(CoreSysAttributes):
|
||||
on_condition=AddonsJobError,
|
||||
)
|
||||
async def update(
|
||||
self, slug: str, backup: bool | None = False
|
||||
self,
|
||||
slug: str,
|
||||
backup: bool | None = False,
|
||||
*,
|
||||
validation_complete: asyncio.Event | None = None,
|
||||
) -> asyncio.Task | None:
|
||||
"""Update add-on.
|
||||
|
||||
@@ -238,6 +261,10 @@ class AddonManager(CoreSysAttributes):
|
||||
# Check if available, Maybe something have changed
|
||||
store.validate_availability()
|
||||
|
||||
# If being run in the background, notify caller that validation has completed
|
||||
if validation_complete:
|
||||
validation_complete.set()
|
||||
|
||||
if backup:
|
||||
await self.sys_backups.do_backup_partial(
|
||||
name=f"addon_{addon.slug}_{addon.version}",
|
||||
@@ -256,7 +283,7 @@ class AddonManager(CoreSysAttributes):
|
||||
],
|
||||
on_condition=AddonsJobError,
|
||||
)
|
||||
async def rebuild(self, slug: str) -> asyncio.Task | None:
|
||||
async def rebuild(self, slug: str, *, force: bool = False) -> asyncio.Task | None:
|
||||
"""Perform a rebuild of local build add-on.
|
||||
|
||||
Returns a Task that completes when addon has state 'started' (see addon.start)
|
||||
@@ -279,8 +306,8 @@ class AddonManager(CoreSysAttributes):
|
||||
raise AddonsError(
|
||||
"Version changed, use Update instead Rebuild", _LOGGER.error
|
||||
)
|
||||
if not addon.need_build:
|
||||
raise AddonsNotSupportedError(
|
||||
if not force and not addon.need_build:
|
||||
raise AddonNotSupportedError(
|
||||
"Can't rebuild a image based add-on", _LOGGER.error
|
||||
)
|
||||
|
||||
@@ -308,7 +335,7 @@ class AddonManager(CoreSysAttributes):
|
||||
if slug not in self.local:
|
||||
_LOGGER.debug("Add-on %s is not local available for restore", slug)
|
||||
addon = Addon(self.coresys, slug)
|
||||
had_ingress = False
|
||||
had_ingress: bool | None = False
|
||||
else:
|
||||
_LOGGER.debug("Add-on %s is local available for restore", slug)
|
||||
addon = self.local[slug]
|
||||
@@ -383,7 +410,7 @@ class AddonManager(CoreSysAttributes):
|
||||
reference=addon.slug,
|
||||
suggestions=[SuggestionType.EXECUTE_REPAIR],
|
||||
)
|
||||
capture_exception(err)
|
||||
await async_capture_exception(err)
|
||||
else:
|
||||
add_host_coros.append(
|
||||
self.sys_plugins.dns.add_host(
|
||||
|
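install() and update() now take an optional asyncio.Event so a caller that schedules the job in the background can still block until availability validation has passed. A minimal sketch of such a caller — the helper name is hypothetical:

import asyncio

async def install_in_background(manager, slug: str) -> asyncio.Task:
    """Schedule an install but return only once validation has passed."""
    validation_complete = asyncio.Event()
    task = asyncio.create_task(
        manager.install(slug, validation_complete=validation_complete)
    )
    # Either validation finishes (event set) or the task fails early with
    # a validation error; wait for whichever happens first.
    await asyncio.wait(
        [task, asyncio.create_task(validation_complete.wait())],
        return_when=asyncio.FIRST_COMPLETED,
    )
    return task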
@@ -89,7 +89,12 @@ from ..const import (
)
from ..coresys import CoreSys
from ..docker.const import Capabilities
from ..exceptions import AddonsNotSupportedError
from ..exceptions import (
    AddonNotSupportedArchitectureError,
    AddonNotSupportedError,
    AddonNotSupportedHomeAssistantVersionError,
    AddonNotSupportedMachineTypeError,
)
from ..jobs.const import JOB_GROUP_ADDON
from ..jobs.job_group import JobGroup
from ..utils import version_is_new_enough
@@ -210,18 +215,6 @@ class AddonModel(JobGroup, ABC):
        """Return description of add-on."""
        return self.data[ATTR_DESCRIPTON]

    @property
    def long_description(self) -> str | None:
        """Return README.md as long_description."""
        readme = Path(self.path_location, "README.md")

        # If readme not exists
        if not readme.exists():
            return None

        # Return data
        return readme.read_text(encoding="utf-8")

    @property
    def repository(self) -> str:
        """Return repository of add-on."""
@@ -306,7 +299,7 @@ class AddonModel(JobGroup, ABC):
        return self.data.get(ATTR_WEBUI)

    @property
    def watchdog(self) -> str | None:
    def watchdog_url(self) -> str | None:
        """Return URL to for watchdog or None."""
        return self.data.get(ATTR_WATCHDOG)

@@ -618,7 +611,7 @@ class AddonModel(JobGroup, ABC):
        return AddonOptions(self.coresys, raw_schema, self.name, self.slug)

    @property
    def schema_ui(self) -> list[dict[any, any]] | None:
    def schema_ui(self) -> list[dict[Any, Any]] | None:
        """Create a UI schema for add-on options."""
        raw_schema = self.data[ATTR_SCHEMA]

@@ -646,6 +639,21 @@ class AddonModel(JobGroup, ABC):
        """Return breaking versions of addon."""
        return self.data[ATTR_BREAKING_VERSIONS]

    async def long_description(self) -> str | None:
        """Return README.md as long_description."""

        def read_readme() -> str | None:
            readme = Path(self.path_location, "README.md")

            # If readme not exists
            if not readme.exists():
                return None

            # Return data
            return readme.read_text(encoding="utf-8", errors="replace")

        return await self.sys_run_in_executor(read_readme)

    def refresh_path_cache(self) -> Awaitable[None]:
        """Refresh cache of existing paths."""

@@ -661,21 +669,24 @@ class AddonModel(JobGroup, ABC):
        """Validate if addon is available for current system."""
        return self._validate_availability(self.data, logger=_LOGGER.error)

    def __eq__(self, other):
        """Compaired add-on objects."""
    def __eq__(self, other: Any) -> bool:
        """Compare add-on objects."""
        if not isinstance(other, AddonModel):
            return False
        return self.slug == other.slug

    def __hash__(self) -> int:
        """Hash for add-on objects."""
        return hash(self.slug)

    def _validate_availability(
        self, config, *, logger: Callable[..., None] | None = None
    ) -> None:
        """Validate if addon is available for current system."""
        # Architecture
        if not self.sys_arch.is_supported(config[ATTR_ARCH]):
            raise AddonsNotSupportedError(
                f"Add-on {self.slug} not supported on this platform, supported architectures: {', '.join(config[ATTR_ARCH])}",
                logger,
            raise AddonNotSupportedArchitectureError(
                logger, slug=self.slug, architectures=config[ATTR_ARCH]
            )

        # Machine / Hardware
@@ -683,9 +694,8 @@ class AddonModel(JobGroup, ABC):
        if machine and (
            f"!{self.sys_machine}" in machine or self.sys_machine not in machine
        ):
            raise AddonsNotSupportedError(
                f"Add-on {self.slug} not supported on this machine, supported machine types: {', '.join(machine)}",
                logger,
            raise AddonNotSupportedMachineTypeError(
                logger, slug=self.slug, machine_types=machine
            )

        # Home Assistant
@@ -694,16 +704,15 @@ class AddonModel(JobGroup, ABC):
        if version and not version_is_new_enough(
            self.sys_homeassistant.version, version
        ):
            raise AddonsNotSupportedError(
                f"Add-on {self.slug} not supported on this system, requires Home Assistant version {version} or greater",
                logger,
            raise AddonNotSupportedHomeAssistantVersionError(
                logger, slug=self.slug, version=str(version)
            )

    def _available(self, config) -> bool:
        """Return True if this add-on is available on this platform."""
        try:
            self._validate_availability(config)
        except AddonsNotSupportedError:
        except AddonNotSupportedError:
            return False

        return True
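The blanket AddonsNotSupportedError with hand-built messages gives way to structured exception types that carry the offending fields. A minimal sketch of how one such subclass might derive its message — the constructor signature is inferred from the call sites above, not confirmed against the real exceptions module:

class AddonNotSupportedError(Exception):
    """Base error: add-on not supported on this system."""

class AddonNotSupportedArchitectureError(AddonNotSupportedError):
    """Structured variant carrying slug and supported architectures."""

    def __init__(self, logger=None, *, slug: str, architectures: list[str]) -> None:
        # Build the user-facing message from the structured fields
        # (wording assumed from the removed f-string above).
        message = (
            f"Add-on {slug} not supported on this platform, "
            f"supported architectures: {', '.join(architectures)}"
        )
        super().__init__(message)
        if logger:
            logger(message)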
@@ -93,15 +93,7 @@ class AddonOptions(CoreSysAttributes):

            typ = self.raw_schema[key]
            try:
                if isinstance(typ, list):
                    # nested value list
                    options[key] = self._nested_validate_list(typ[0], value, key)
                elif isinstance(typ, dict):
                    # nested value dict
                    options[key] = self._nested_validate_dict(typ, value, key)
                else:
                    # normal value
                    options[key] = self._single_validate(typ, value, key)
                options[key] = self._validate_element(typ, value, key)
            except (IndexError, KeyError):
                raise vol.Invalid(
                    f"Type error for option '{key}' in {self._name} ({self._slug})"
@@ -111,7 +103,20 @@ class AddonOptions(CoreSysAttributes):
        return options

    # pylint: disable=no-value-for-parameter
    def _single_validate(self, typ: str, value: Any, key: str):
    def _validate_element(self, typ: Any, value: Any, key: str) -> Any:
        """Validate a value against a type specification."""
        if isinstance(typ, list):
            # nested value list
            return self._nested_validate_list(typ[0], value, key)
        elif isinstance(typ, dict):
            # nested value dict
            return self._nested_validate_dict(typ, value, key)
        else:
            # normal value
            return self._single_validate(typ, value, key)

    # pylint: disable=no-value-for-parameter
    def _single_validate(self, typ: str, value: Any, key: str) -> Any:
        """Validate a single element."""
        # if required argument
        if value is None:
@@ -137,7 +142,7 @@ class AddonOptions(CoreSysAttributes):
            ) from None

        # prepare range
        range_args = {}
        range_args: dict[str, Any] = {}
        for group_name in _SCHEMA_LENGTH_PARTS:
            group_value = match.group(group_name)
            if group_value:
@@ -182,13 +187,15 @@ class AddonOptions(CoreSysAttributes):

            # Device valid
            self.devices.add(device)
            return str(device.path)
            return str(value)

        raise vol.Invalid(
            f"Fatal error for option '{key}' with type '{typ}' in {self._name} ({self._slug})"
        ) from None

    def _nested_validate_list(self, typ: Any, data_list: list[Any], key: str):
    def _nested_validate_list(
        self, typ: Any, data_list: list[Any], key: str
    ) -> list[Any]:
        """Validate nested items."""
        options = []

@@ -201,17 +208,13 @@ class AddonOptions(CoreSysAttributes):
        # Process list
        for element in data_list:
            # Nested?
            if isinstance(typ, dict):
                c_options = self._nested_validate_dict(typ, element, key)
                options.append(c_options)
            else:
                options.append(self._single_validate(typ, element, key))
            options.append(self._validate_element(typ, element, key))

        return options

    def _nested_validate_dict(
        self, typ: dict[Any, Any], data_dict: dict[Any, Any], key: str
    ):
    ) -> dict[Any, Any]:
        """Validate nested items."""
        options = {}

@@ -231,12 +234,7 @@ class AddonOptions(CoreSysAttributes):
                continue

            # Nested?
            if isinstance(typ[c_key], list):
                options[c_key] = self._nested_validate_list(
                    typ[c_key][0], c_value, c_key
                )
            else:
                options[c_key] = self._single_validate(typ[c_key], c_value, c_key)
            options[c_key] = self._validate_element(typ[c_key], c_value, c_key)

        self._check_missing_options(typ, options, key)
        return options
@@ -274,18 +272,28 @@ class UiOptions(CoreSysAttributes):

        # read options
        for key, value in raw_schema.items():
            if isinstance(value, list):
                # nested value list
                self._nested_ui_list(ui_schema, value, key)
            elif isinstance(value, dict):
                # nested value dict
                self._nested_ui_dict(ui_schema, value, key)
            else:
                # normal value
                self._single_ui_option(ui_schema, value, key)
            self._ui_schema_element(ui_schema, value, key)

        return ui_schema

    def _ui_schema_element(
        self,
        ui_schema: list[dict[str, Any]],
        value: str,
        key: str,
        multiple: bool = False,
    ):
        if isinstance(value, list):
            # nested value list
            assert not multiple
            self._nested_ui_list(ui_schema, value, key)
        elif isinstance(value, dict):
            # nested value dict
            self._nested_ui_dict(ui_schema, value, key, multiple)
        else:
            # normal value
            self._single_ui_option(ui_schema, value, key, multiple)

    def _single_ui_option(
        self,
        ui_schema: list[dict[str, Any]],
@@ -377,10 +385,7 @@ class UiOptions(CoreSysAttributes):
            _LOGGER.error("Invalid schema %s", key)
            return

        if isinstance(element, dict):
            self._nested_ui_dict(ui_schema, element, key, multiple=True)
        else:
            self._single_ui_option(ui_schema, element, key, multiple=True)
        self._ui_schema_element(ui_schema, element, key, multiple=True)

    def _nested_ui_dict(
        self,
@@ -390,20 +395,16 @@ class UiOptions(CoreSysAttributes):
        multiple: bool = False,
    ) -> None:
        """UI nested dict items."""
        ui_node = {
        ui_node: dict[str, Any] = {
            "name": key,
            "type": "schema",
            "optional": True,
            "multiple": multiple,
        }

        nested_schema = []
        nested_schema: list[dict[str, Any]] = []
        for c_key, c_value in option_dict.items():
            # Nested?
            if isinstance(c_value, list):
                self._nested_ui_list(nested_schema, c_value, c_key)
            else:
                self._single_ui_option(nested_schema, c_value, c_key)
            self._ui_schema_element(nested_schema, c_value, c_key)

        ui_node["schema"] = nested_schema
        ui_schema.append(ui_node)
@@ -413,7 +414,7 @@ def _create_device_filter(str_filter: str) -> dict[str, Any]:
    """Generate device Filter."""
    raw_filter = dict(value.split("=") for value in str_filter.split(";"))

    clean_filter = {}
    clean_filter: dict[str, Any] = {}
    for key, value in raw_filter.items():
        if key == "subsystem":
            clean_filter[key] = UdevSubsystem(value)
@@ -2,9 +2,9 @@

from __future__ import annotations

import asyncio
import logging
from pathlib import Path
import subprocess
from typing import TYPE_CHECKING

from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE
@@ -46,6 +46,7 @@ def rating_security(addon: AddonModel) -> int:
        privilege in addon.privileged
        for privilege in (
            Capabilities.BPF,
            Capabilities.CHECKPOINT_RESTORE,
            Capabilities.DAC_READ_SEARCH,
            Capabilities.NET_ADMIN,
            Capabilities.NET_RAW,
@@ -85,18 +86,20 @@ def rating_security(addon: AddonModel) -> int:
    return max(min(8, rating), 1)


async def remove_data(folder: Path) -> None:
    """Remove folder and reset privileged."""
    try:
        proc = await asyncio.create_subprocess_exec(
            "rm", "-rf", str(folder), stdout=asyncio.subprocess.DEVNULL
        )
def remove_data(folder: Path) -> None:
    """Remove folder and reset privileged.

        _, error_msg = await proc.communicate()
    Must be run in executor.
    """
    try:
        subprocess.run(
            ["rm", "-rf", str(folder)], stdout=subprocess.DEVNULL, text=True, check=True
        )
    except OSError as err:
        error_msg = str(err)
    except subprocess.CalledProcessError as procerr:
        error_msg = procerr.stderr.strip()
    else:
        if proc.returncode == 0:
            return
        return

    _LOGGER.error("Can't remove Add-on Data: %s", error_msg)
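remove_data is now a plain blocking function with a "must be run in executor" contract. A minimal sketch of a compliant caller, assuming the remove_data from the hunk above is in scope:

import asyncio
from pathlib import Path

async def purge_addon_data(folder: Path) -> None:
    # Push the blocking subprocess call onto the default executor so the
    # event loop stays responsive while `rm -rf` runs.
    loop = asyncio.get_running_loop()
    await loop.run_in_executor(None, remove_data, folder)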
@@ -32,6 +32,7 @@ from ..const import (
    ATTR_DISCOVERY,
    ATTR_DOCKER_API,
    ATTR_ENVIRONMENT,
    ATTR_FIELDS,
    ATTR_FULL_ACCESS,
    ATTR_GPIO,
    ATTR_HASSIO_API,
@@ -137,7 +138,19 @@ RE_DOCKER_IMAGE_BUILD = re.compile(
    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$"
)

SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
SCHEMA_ELEMENT = vol.Schema(
    vol.Any(
        vol.Match(RE_SCHEMA_ELEMENT),
        [
            # A list may not directly contain another list
            vol.Any(
                vol.Match(RE_SCHEMA_ELEMENT),
                {str: vol.Self},
            )
        ],
        {str: vol.Self},
    )
)

RE_MACHINE = re.compile(
    r"^!?(?:"
@@ -266,10 +279,23 @@ def _migrate_addon_config(protocol=False):
        volumes = []
        for entry in config.get(ATTR_MAP, []):
            if isinstance(entry, dict):
                # Validate that dict entries have required 'type' field
                if ATTR_TYPE not in entry:
                    _LOGGER.warning(
                        "Add-on config has invalid map entry missing 'type' field: %s. Skipping invalid entry for %s",
                        entry,
                        name,
                    )
                    continue
                volumes.append(entry)
            if isinstance(entry, str):
                result = RE_VOLUME.match(entry)
                if not result:
                    _LOGGER.warning(
                        "Add-on config has invalid map entry: %s. Skipping invalid entry for %s",
                        entry,
                        name,
                    )
                    continue
                volumes.append(
                    {
@@ -278,8 +304,8 @@ def _migrate_addon_config(protocol=False):
                    }
                )

        if volumes:
            config[ATTR_MAP] = volumes
        # Always update config to clear potentially malformed ones
        config[ATTR_MAP] = volumes

        # 2023-10 "config" became "homeassistant" so /config can be used for addon's public config
        if any(volume[ATTR_TYPE] == MappingType.CONFIG for volume in volumes):
@@ -393,20 +419,7 @@ _SCHEMA_ADDON_CONFIG = vol.Schema(
        vol.Optional(ATTR_CODENOTARY): vol.Email(),
        vol.Optional(ATTR_OPTIONS, default={}): dict,
        vol.Optional(ATTR_SCHEMA, default={}): vol.Any(
            vol.Schema(
                {
                    str: vol.Any(
                        SCHEMA_ELEMENT,
                        [
                            vol.Any(
                                SCHEMA_ELEMENT,
                                {str: vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])},
                            )
                        ],
                        vol.Schema({str: vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}),
                    )
                }
            ),
            vol.Schema({str: SCHEMA_ELEMENT}),
            False,
        ),
        vol.Optional(ATTR_IMAGE): docker_image,
@@ -442,6 +455,7 @@ SCHEMA_TRANSLATION_CONFIGURATION = vol.Schema(
    {
        vol.Required(ATTR_NAME): str,
        vol.Optional(ATTR_DESCRIPTON): vol.Maybe(str),
        vol.Optional(ATTR_FIELDS): {str: vol.Self},
    },
    extra=vol.REMOVE_EXTRA,
)
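SCHEMA_ELEMENT now handles nested structures recursively via vol.Self, which is what lets the hand-unrolled top-level schema collapse to a single reference. A small demo of the recursion, simplified to plain str scalars instead of RE_SCHEMA_ELEMENT (a sketch, assuming voluptuous resolves vol.Self inside nested containers as it does here):

import voluptuous as vol

# A scalar, a list of scalars/dicts, or a dict whose values recurse into
# the whole schema again via vol.Self.
ELEMENT = vol.Schema(
    vol.Any(
        str,
        [vol.Any(str, {str: vol.Self})],
        {str: vol.Self},
    )
)

ELEMENT("int")                         # scalar type string
ELEMENT(["str", {"port": "int?"}])     # list of scalars or dicts
ELEMENT({"network": {"ipv4": "str"}})  # dicts nest to any depth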
@@ -1,16 +1,17 @@
"""Init file for Supervisor RESTful API."""

from dataclasses import dataclass
from functools import partial
import logging
from pathlib import Path
from typing import Any

from aiohttp import web
from aiohttp import hdrs, web

from ..const import AddonState
from ..const import SUPERVISOR_DOCKER_NAME, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import APIAddonNotInstalled, HostNotSupportedError
from ..utils.sentry import capture_exception
from ..utils.sentry import async_capture_exception
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
@@ -47,6 +48,14 @@ MAX_CLIENT_SIZE: int = 1024**2 * 16
MAX_LINE_SIZE: int = 24570


@dataclass(slots=True, frozen=True)
class StaticResourceConfig:
    """Configuration for a static resource."""

    prefix: str
    path: Path


class RestAPI(CoreSysAttributes):
    """Handle RESTful API for Supervisor."""

@@ -73,12 +82,12 @@ class RestAPI(CoreSysAttributes):
        self._site: web.TCPSite | None = None

        # share single host API handler for reuse in logging endpoints
        self._api_host: APIHost | None = None
        self._api_host: APIHost = APIHost()
        self._api_host.coresys = coresys

    async def load(self) -> None:
        """Register REST API Calls."""
        self._api_host = APIHost()
        self._api_host.coresys = self.coresys
        static_resource_configs: list[StaticResourceConfig] = []

        self._register_addons()
        self._register_audio()
@@ -98,7 +107,7 @@ class RestAPI(CoreSysAttributes):
        self._register_network()
        self._register_observer()
        self._register_os()
        self._register_panel()
        static_resource_configs.extend(self._register_panel())
        self._register_proxy()
        self._register_resolution()
        self._register_root()
@@ -107,6 +116,17 @@ class RestAPI(CoreSysAttributes):
        self._register_store()
        self._register_supervisor()

        if static_resource_configs:

            def process_configs() -> list[web.StaticResource]:
                return [
                    web.StaticResource(config.prefix, config.path)
                    for config in static_resource_configs
                ]

            for resource in await self.sys_run_in_executor(process_configs):
                self.webapp.router.register_resource(resource)

        await self.start()

    def _register_advanced_logs(self, path: str, syslog_identifier: str):
@@ -126,6 +146,14 @@ class RestAPI(CoreSysAttributes):
                    follow=True,
                ),
            ),
            web.get(
                f"{path}/logs/latest",
                partial(
                    self._api_host.advanced_logs,
                    identifier=syslog_identifier,
                    latest=True,
                ),
            ),
            web.get(
                f"{path}/logs/boots/{{bootid}}",
                partial(self._api_host.advanced_logs, identifier=syslog_identifier),
@@ -178,6 +206,7 @@ class RestAPI(CoreSysAttributes):
                web.post("/host/reload", api_host.reload),
                web.post("/host/options", api_host.options),
                web.get("/host/services", api_host.services),
                web.get("/host/disks/default/usage", api_host.disk_usage),
            ]
        )

@@ -217,6 +246,8 @@ class RestAPI(CoreSysAttributes):
            [
                web.get("/os/info", api_os.info),
                web.post("/os/update", api_os.update),
                web.get("/os/config/swap", api_os.config_swap_info),
                web.post("/os/config/swap", api_os.config_swap_options),
                web.post("/os/config/sync", api_os.config_sync),
                web.post("/os/datadisk/move", api_os.migrate_data),
                web.get("/os/datadisk/list", api_os.list_data),
@@ -323,6 +354,9 @@ class RestAPI(CoreSysAttributes):
        api_root.coresys = self.coresys

        self.webapp.add_routes([web.get("/info", api_root.info)])
        self.webapp.add_routes([web.post("/reload_updates", api_root.reload_updates)])

        # Discouraged
        self.webapp.add_routes([web.post("/refresh_updates", api_root.refresh_updates)])
        self.webapp.add_routes(
            [web.get("/available_updates", api_root.available_updates)]
@@ -401,7 +435,7 @@ class RestAPI(CoreSysAttributes):
        async def get_supervisor_logs(*args, **kwargs):
            try:
                return await self._api_host.advanced_logs_handler(
                    *args, identifier="hassio_supervisor", **kwargs
                    *args, identifier=SUPERVISOR_DOCKER_NAME, **kwargs
                )
            except Exception as err:  # pylint: disable=broad-exception-caught
                # Supervisor logs are critical, so catch everything, log the exception
@@ -412,8 +446,9 @@ class RestAPI(CoreSysAttributes):
                if not isinstance(err, HostNotSupportedError):
                    # No need to capture HostNotSupportedError to Sentry, the cause
                    # is known and reported to the user using the resolution center.
                    capture_exception(err)
                    await async_capture_exception(err)
                kwargs.pop("follow", None)  # Follow is not supported for Docker logs
                kwargs.pop("latest", None)  # Latest is not supported for Docker logs
                return await api_supervisor.logs(*args, **kwargs)

        self.webapp.add_routes(
@@ -423,6 +458,10 @@ class RestAPI(CoreSysAttributes):
                    "/supervisor/logs/follow",
                    partial(get_supervisor_logs, follow=True),
                ),
                web.get(
                    "/supervisor/logs/latest",
                    partial(get_supervisor_logs, latest=True),
                ),
                web.get("/supervisor/logs/boots/{bootid}", get_supervisor_logs),
                web.get(
                    "/supervisor/logs/boots/{bootid}/follow",
@@ -504,7 +543,7 @@ class RestAPI(CoreSysAttributes):

        self.webapp.add_routes(
            [
                web.get("/addons", api_addons.list),
                web.get("/addons", api_addons.list_addons),
                web.post("/addons/{addon}/uninstall", api_addons.uninstall),
                web.post("/addons/{addon}/start", api_addons.start),
                web.post("/addons/{addon}/stop", api_addons.stop),
@@ -535,6 +574,10 @@ class RestAPI(CoreSysAttributes):
                    "/addons/{addon}/logs/follow",
                    partial(get_addon_logs, follow=True),
                ),
                web.get(
                    "/addons/{addon}/logs/latest",
                    partial(get_addon_logs, latest=True),
                ),
                web.get("/addons/{addon}/logs/boots/{bootid}", get_addon_logs),
                web.get(
                    "/addons/{addon}/logs/boots/{bootid}/follow",
@@ -572,7 +615,9 @@ class RestAPI(CoreSysAttributes):
                web.post("/ingress/session", api_ingress.create_session),
                web.post("/ingress/validate_session", api_ingress.validate_session),
                web.get("/ingress/panels", api_ingress.panels),
                web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
                web.route(
                    hdrs.METH_ANY, "/ingress/{token}/{path:.*}", api_ingress.handler
                ),
            ]
        )

@@ -583,7 +628,7 @@ class RestAPI(CoreSysAttributes):

        self.webapp.add_routes(
            [
                web.get("/backups", api_backups.list),
                web.get("/backups", api_backups.list_backups),
                web.get("/backups/info", api_backups.info),
                web.post("/backups/options", api_backups.options),
                web.post("/backups/reload", api_backups.reload),
@@ -610,7 +655,7 @@ class RestAPI(CoreSysAttributes):

        self.webapp.add_routes(
            [
                web.get("/services", api_services.list),
                web.get("/services", api_services.list_services),
                web.get("/services/{service}", api_services.get_service),
                web.post("/services/{service}", api_services.set_service),
                web.delete("/services/{service}", api_services.del_service),
@@ -624,7 +669,7 @@ class RestAPI(CoreSysAttributes):

        self.webapp.add_routes(
            [
                web.get("/discovery", api_discovery.list),
                web.get("/discovery", api_discovery.list_discovery),
                web.get("/discovery/{uuid}", api_discovery.get_discovery),
                web.delete("/discovery/{uuid}", api_discovery.del_discovery),
                web.post("/discovery", api_discovery.set_discovery),
@@ -707,6 +752,10 @@ class RestAPI(CoreSysAttributes):
                    "/store/addons/{addon}/documentation",
                    api_store.addons_addon_documentation,
                ),
                web.get(
                    "/store/addons/{addon}/availability",
                    api_store.addons_addon_availability,
                ),
                web.post(
                    "/store/addons/{addon}/install", api_store.addons_addon_install
                ),
@@ -750,10 +799,9 @@ class RestAPI(CoreSysAttributes):
            ]
        )

    def _register_panel(self) -> None:
    def _register_panel(self) -> list[StaticResourceConfig]:
        """Register panel for Home Assistant."""
        panel_dir = Path(__file__).parent.joinpath("panel")
        self.webapp.add_routes([web.static("/app", panel_dir)])
        return [StaticResourceConfig("/app", Path(__file__).parent.joinpath("panel"))]

    def _register_docker(self) -> None:
        """Register docker configuration functions."""
@@ -763,6 +811,7 @@ class RestAPI(CoreSysAttributes):
        self.webapp.add_routes(
            [
                web.get("/docker/info", api_docker.info),
                web.post("/docker/options", api_docker.options),
                web.get("/docker/registries", api_docker.registries),
                web.post("/docker/registries", api_docker.create_registry),
                web.delete("/docker/registries/{hostname}", api_docker.remove_registry),
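Panel registration now returns StaticResourceConfig entries, and load() builds the web.StaticResource objects inside an executor, since constructing one resolves the directory on disk (blocking I/O). A minimal sketch of the same idea outside the Supervisor:

from pathlib import Path

from aiohttp import web

def build_static_resources(
    configs: list[tuple[str, Path]],
) -> list[web.StaticResource]:
    """Build static resources off the event loop.

    Sketch only: StaticResource construction touches the filesystem, so
    callers run this via run_in_executor before registering the results.
    """
    return [web.StaticResource(prefix, path) for prefix, path in configs]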
@@ -3,14 +3,13 @@
import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from typing import Any, TypedDict

from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..addons.addon import Addon
from ..addons.manager import AnyAddon
from ..addons.utils import rating_security
from ..const import (
    ATTR_ADDONS,
@@ -37,6 +36,7 @@ from ..const import (
    ATTR_DNS,
    ATTR_DOCKER_API,
    ATTR_DOCUMENTATION,
    ATTR_FORCE,
    ATTR_FULL_ACCESS,
    ATTR_GPIO,
    ATTR_HASSIO_API,
@@ -63,7 +63,6 @@ from ..const import (
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_MESSAGE,
    ATTR_NAME,
    ATTR_NETWORK,
    ATTR_NETWORK_DESCRIPTION,
@@ -72,7 +71,6 @@ from ..const import (
    ATTR_OPTIONS,
    ATTR_PRIVILEGED,
    ATTR_PROTECTED,
    ATTR_PWNED,
    ATTR_RATING,
    ATTR_REPOSITORY,
    ATTR_SCHEMA,
@@ -90,7 +88,6 @@ from ..const import (
    ATTR_UPDATE_AVAILABLE,
    ATTR_URL,
    ATTR_USB,
    ATTR_VALID,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
    ATTR_VIDEO,
@@ -143,15 +140,25 @@ SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
SCHEMA_UNINSTALL = vol.Schema(
    {vol.Optional(ATTR_REMOVE_CONFIG, default=False): vol.Boolean()}
)

SCHEMA_REBUILD = vol.Schema({vol.Optional(ATTR_FORCE, default=False): vol.Boolean()})
# pylint: enable=no-value-for-parameter


class OptionsValidateResponse(TypedDict):
    """Response object for options validate."""

    message: str
    valid: bool
    pwned: bool | None


class APIAddons(CoreSysAttributes):
    """Handle RESTful API for add-on functions."""

    def get_addon_for_request(self, request: web.Request) -> Addon:
        """Return addon, throw an exception if it doesn't exist."""
        addon_slug: str = request.match_info.get("addon")
        addon_slug: str = request.match_info["addon"]

        # Lookup itself
        if addon_slug == "self":
@@ -169,7 +176,7 @@ class APIAddons(CoreSysAttributes):
        return addon

    @api_process
    async def list(self, request: web.Request) -> dict[str, Any]:
    async def list_addons(self, request: web.Request) -> dict[str, Any]:
        """Return all add-ons or repositories."""
        data_addons = [
            {
@@ -204,7 +211,7 @@ class APIAddons(CoreSysAttributes):

    async def info(self, request: web.Request) -> dict[str, Any]:
        """Return add-on information."""
        addon: AnyAddon = self.get_addon_for_request(request)
        addon: Addon = self.get_addon_for_request(request)

        data = {
            ATTR_NAME: addon.name,
@@ -212,7 +219,7 @@ class APIAddons(CoreSysAttributes):
            ATTR_HOSTNAME: addon.hostname,
            ATTR_DNS: addon.dns,
            ATTR_DESCRIPTON: addon.description,
            ATTR_LONG_DESCRIPTION: addon.long_description,
            ATTR_LONG_DESCRIPTION: await addon.long_description(),
            ATTR_ADVANCED: addon.advanced,
            ATTR_STAGE: addon.stage,
            ATTR_REPOSITORY: addon.repository,
@@ -299,7 +306,7 @@ class APIAddons(CoreSysAttributes):
        )

        # Validate/Process Body
        body = await api_validate(addon_schema, request, origin=[ATTR_OPTIONS])
        body = await api_validate(addon_schema, request)
        if ATTR_OPTIONS in body:
            addon.options = body[ATTR_OPTIONS]
        if ATTR_BOOT in body:
@@ -322,7 +329,7 @@ class APIAddons(CoreSysAttributes):
        if ATTR_WATCHDOG in body:
            addon.watchdog = body[ATTR_WATCHDOG]

        addon.save_persist()
        await addon.save_persist()

    @api_process
    async def sys_options(self, request: web.Request) -> None:
@@ -336,13 +343,13 @@ class APIAddons(CoreSysAttributes):
        if ATTR_SYSTEM_MANAGED_CONFIG_ENTRY in body:
            addon.system_managed_config_entry = body[ATTR_SYSTEM_MANAGED_CONFIG_ENTRY]

        addon.save_persist()
        await addon.save_persist()

    @api_process
    async def options_validate(self, request: web.Request) -> None:
    async def options_validate(self, request: web.Request) -> OptionsValidateResponse:
        """Validate user options for add-on."""
        addon = self.get_addon_for_request(request)
        data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}
        data = OptionsValidateResponse(message="", valid=True, pwned=False)

        options = await request.json(loads=json_loads) or addon.options
@@ -351,8 +358,8 @@ class APIAddons(CoreSysAttributes):
        try:
            options_schema.validate(options)
        except vol.Invalid as ex:
            data[ATTR_MESSAGE] = humanize_error(options, ex)
            data[ATTR_VALID] = False
            data["message"] = humanize_error(options, ex)
            data["valid"] = False

        if not self.sys_security.pwned:
            return data
@@ -363,24 +370,24 @@ class APIAddons(CoreSysAttributes):
                await self.sys_security.verify_secret(secret)
                continue
            except PwnedSecret:
                data[ATTR_PWNED] = True
                data["pwned"] = True
            except PwnedError:
                data[ATTR_PWNED] = None
                data["pwned"] = None
                break

        if self.sys_security.force and data[ATTR_PWNED] in (None, True):
            data[ATTR_VALID] = False
            if data[ATTR_PWNED] is None:
                data[ATTR_MESSAGE] = "Error happening on pwned secrets check!"
        if self.sys_security.force and data["pwned"] in (None, True):
            data["valid"] = False
            if data["pwned"] is None:
                data["message"] = "Error happening on pwned secrets check!"
            else:
                data[ATTR_MESSAGE] = "Add-on uses pwned secrets!"
                data["message"] = "Add-on uses pwned secrets!"

        return data

    @api_process
    async def options_config(self, request: web.Request) -> None:
        """Validate user options for add-on."""
        slug: str = request.match_info.get("addon")
        slug: str = request.match_info["addon"]
        if slug != "self":
            raise APIForbidden("This can be only read by the Add-on itself!")
        addon = self.get_addon_for_request(request)
@@ -402,7 +409,7 @@ class APIAddons(CoreSysAttributes):
            _LOGGER.warning("Changing protected flag for %s!", addon.slug)
            addon.protected = body[ATTR_PROTECTED]

        addon.save_persist()
        await addon.save_persist()

    @api_process
    async def stats(self, request: web.Request) -> dict[str, Any]:
@@ -457,7 +464,11 @@ class APIAddons(CoreSysAttributes):
    async def rebuild(self, request: web.Request) -> None:
        """Rebuild local build add-on."""
        addon = self.get_addon_for_request(request)
        if start_task := await asyncio.shield(self.sys_addons.rebuild(addon.slug)):
        body: dict[str, Any] = await api_validate(SCHEMA_REBUILD, request)

        if start_task := await asyncio.shield(
            self.sys_addons.rebuild(addon.slug, force=body[ATTR_FORCE])
        ):
            await start_task

    @api_process
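options_validate now returns a TypedDict, which keeps the runtime payload a plain JSON-serializable dict while giving type checkers the key/type contract. A minimal sketch of the pattern:

from typing import TypedDict

class OptionsValidateResponse(TypedDict):
    """Shape of the options_validate payload (mirrors the hunk above)."""

    message: str
    valid: bool
    pwned: bool | None

def empty_response() -> OptionsValidateResponse:
    # At runtime this is just a dict, so it can be returned straight to
    # the JSON layer; the annotation exists purely for static checking.
    return OptionsValidateResponse(message="", valid=True, pwned=False)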
@@ -124,7 +124,7 @@ class APIAudio(CoreSysAttributes):
    @api_process
    async def set_volume(self, request: web.Request) -> None:
        """Set audio volume on stream."""
        source: StreamType = StreamType(request.match_info.get("source"))
        source: StreamType = StreamType(request.match_info["source"])
        application: bool = request.path.endswith("application")
        body = await api_validate(SCHEMA_VOLUME, request)

@@ -137,7 +137,7 @@ class APIAudio(CoreSysAttributes):
    @api_process
    async def set_mute(self, request: web.Request) -> None:
        """Mute audio volume on stream."""
        source: StreamType = StreamType(request.match_info.get("source"))
        source: StreamType = StreamType(request.match_info["source"])
        application: bool = request.path.endswith("application")
        body = await api_validate(SCHEMA_MUTE, request)

@@ -150,7 +150,7 @@ class APIAudio(CoreSysAttributes):
    @api_process
    async def set_default(self, request: web.Request) -> None:
        """Set audio default stream."""
        source: StreamType = StreamType(request.match_info.get("source"))
        source: StreamType = StreamType(request.match_info["source"])
        body = await api_validate(SCHEMA_DEFAULT, request)

        await asyncio.shield(self.sys_host.sound.set_default(source, body[ATTR_NAME]))
@@ -1,19 +1,21 @@
"""Init file for Supervisor auth/SSO RESTful API."""

import asyncio
from collections.abc import Awaitable
import logging
from typing import Any
from typing import Any, cast

from aiohttp import BasicAuth, web
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
from aiohttp.web import FileField
from aiohttp.web_exceptions import HTTPUnauthorized
from multidict import MultiDictProxy
import voluptuous as vol

from ..addons.addon import Addon
from ..const import ATTR_NAME, ATTR_PASSWORD, ATTR_USERNAME, REQUEST_FROM
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
from ..utils.json import json_loads
from .const import (
    ATTR_GROUP_IDS,
    ATTR_IS_ACTIVE,
@@ -23,7 +25,7 @@ from .const import (
    CONTENT_TYPE_JSON,
    CONTENT_TYPE_URL,
)
from .utils import api_process, api_validate
from .utils import api_process, api_validate, json_loads

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -42,7 +44,7 @@ REALM_HEADER: dict[str, str] = {
class APIAuth(CoreSysAttributes):
    """Handle RESTful API for auth functions."""

    def _process_basic(self, request: web.Request, addon: Addon) -> bool:
    def _process_basic(self, request: web.Request, addon: Addon) -> Awaitable[bool]:
        """Process login request with basic auth.

        Return a coroutine.
@@ -51,8 +53,11 @@ class APIAuth(CoreSysAttributes):
        return self.sys_auth.check_login(addon, auth.login, auth.password)

    def _process_dict(
        self, request: web.Request, addon: Addon, data: dict[str, str]
    ) -> bool:
        self,
        request: web.Request,
        addon: Addon,
        data: dict[str, Any] | MultiDictProxy[str | bytes | FileField],
    ) -> Awaitable[bool]:
        """Process login with dict data.

        Return a coroutine.
@@ -60,14 +65,22 @@ class APIAuth(CoreSysAttributes):
        username = data.get("username") or data.get("user")
        password = data.get("password")

        return self.sys_auth.check_login(addon, username, password)
        # Test that we did receive strings and not something else, raise if so
        try:
            _ = username.encode and password.encode  # type: ignore
        except AttributeError:
            raise HTTPUnauthorized(headers=REALM_HEADER) from None

        return self.sys_auth.check_login(
            addon, cast(str, username), cast(str, password)
        )

    @api_process
    async def auth(self, request: web.Request) -> bool:
        """Process login request."""
        addon = request[REQUEST_FROM]

        if not addon.access_auth_api:
        if not isinstance(addon, Addon) or not addon.access_auth_api:
            raise APIForbidden("Can't use Home Assistant auth!")

        # BasicAuth
@@ -79,13 +92,18 @@ class APIAuth(CoreSysAttributes):
        # Json
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
            data = await request.json(loads=json_loads)
            return await self._process_dict(request, addon, data)
            if not await self._process_dict(request, addon, data):
                raise HTTPUnauthorized()
            return True

        # URL encoded
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
            data = await request.post()
            return await self._process_dict(request, addon, data)
            if not await self._process_dict(request, addon, data):
                raise HTTPUnauthorized()
            return True

        # Advertise Basic authentication by default
        raise HTTPUnauthorized(headers=REALM_HEADER)

    @api_process
@@ -99,7 +117,7 @@ class APIAuth(CoreSysAttributes):
    @api_process
    async def cache(self, request: web.Request) -> None:
        """Process cache reset request."""
        self.sys_auth.reset_data()
        await self.sys_auth.reset_data()

    @api_process
    async def list_users(self, request: web.Request) -> dict[str, list[dict[str, Any]]]:
@@ -3,17 +3,18 @@
from __future__ import annotations

import asyncio
from collections.abc import Callable
import errno
from io import IOBase
import logging
from pathlib import Path
import re
from tempfile import TemporaryDirectory
from typing import Any
from typing import Any, cast

from aiohttp import web
from aiohttp import BodyPartReader, web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..backups.backup import Backup
from ..backups.const import LOCATION_CLOUD_BACKUP, LOCATION_TYPE
@@ -26,6 +27,7 @@ from ..const import (
    ATTR_DATE,
    ATTR_DAYS_UNTIL_STALE,
    ATTR_EXTRA,
    ATTR_FILENAME,
    ATTR_FOLDERS,
    ATTR_HOMEASSISTANT,
    ATTR_HOMEASSISTANT_EXCLUDE_DATABASE,
@@ -36,33 +38,33 @@ from ..const import (
    ATTR_PROTECTED,
    ATTR_REPOSITORIES,
    ATTR_SIZE,
    ATTR_SIZE_BYTES,
    ATTR_SLUG,
    ATTR_SUPERVISOR_VERSION,
    ATTR_TIMEOUT,
    ATTR_TYPE,
    ATTR_VERSION,
    REQUEST_FROM,
    BusEvent,
    CoreState,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden, APINotFound
from ..jobs import JobSchedulerOptions
from ..mounts.const import MountUsage
from ..resolution.const import UnhealthyReason
from .const import (
    ATTR_ADDITIONAL_LOCATIONS,
    ATTR_BACKGROUND,
    ATTR_LOCATION_ATTRIBUTES,
    ATTR_LOCATIONS,
    ATTR_SIZE_BYTES,
    CONTENT_TYPE_TAR,
)
from .utils import api_process, api_validate
from .utils import api_process, api_validate, background_task

_LOGGER: logging.Logger = logging.getLogger(__name__)

ALL_ADDONS_FLAG = "ALL"

LOCATION_LOCAL = ".local"

RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")
RE_BACKUP_FILENAME = re.compile(r"^[^\\\/]+\.tar$")

@@ -78,12 +80,23 @@ def _ensure_list(item: Any) -> list:
    return item


def _convert_local_location(item: str | None) -> str | None:
    """Convert local location value."""
    if item in {LOCATION_LOCAL, ""}:
        return None
    return item


# pylint: disable=no-value-for-parameter
SCHEMA_FOLDERS = vol.All([vol.In(_ALL_FOLDERS)], vol.Unique())
SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)
SCHEMA_LOCATION_LIST = vol.All(_ensure_list, [SCHEMA_LOCATION], vol.Unique())

SCHEMA_RESTORE_FULL = vol.Schema(
    {
        vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
        vol.Optional(ATTR_LOCATION): vol.Maybe(str),
        vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION,
    }
)

@@ -91,18 +104,17 @@ SCHEMA_RESTORE_PARTIAL = SCHEMA_RESTORE_FULL.extend(
    {
        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
        vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
    }
)

SCHEMA_BACKUP_FULL = vol.Schema(
    {
        vol.Optional(ATTR_NAME): str,
        vol.Optional(ATTR_FILENAME): vol.Match(RE_BACKUP_FILENAME),
        vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
        vol.Optional(ATTR_COMPRESSED): vol.Maybe(vol.Boolean()),
        vol.Optional(ATTR_LOCATION): vol.All(
            _ensure_list, [vol.Maybe(str)], vol.Unique()
        ),
        vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST,
        vol.Optional(ATTR_HOMEASSISTANT_EXCLUDE_DATABASE): vol.Boolean(),
        vol.Optional(ATTR_BACKGROUND, default=False): vol.Boolean(),
        vol.Optional(ATTR_EXTRA): dict,
@@ -114,30 +126,14 @@ SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
        vol.Optional(ATTR_ADDONS): vol.Or(
            ALL_ADDONS_FLAG, vol.All([str], vol.Unique())
        ),
        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(_ALL_FOLDERS)], vol.Unique()),
        vol.Optional(ATTR_FOLDERS): SCHEMA_FOLDERS,
        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
    }
)

SCHEMA_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale,
    }
)

SCHEMA_FREEZE = vol.Schema(
    {
        vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1)),
    }
)

SCHEMA_REMOVE = vol.Schema(
    {
        vol.Optional(ATTR_LOCATION): vol.All(
            _ensure_list, [vol.Maybe(str)], vol.Unique()
        ),
    }
)
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_DAYS_UNTIL_STALE): days_until_stale})
SCHEMA_FREEZE = vol.Schema({vol.Optional(ATTR_TIMEOUT): vol.All(int, vol.Range(min=1))})
SCHEMA_REMOVE = vol.Schema({vol.Optional(ATTR_LOCATION): SCHEMA_LOCATION_LIST})


class APIBackups(CoreSysAttributes):
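SCHEMA_LOCATION chains vol.Maybe(str) with a plain normalizer function; vol.All feeds the output of each validator into the next, so both ".local" and the empty string normalize to None. A short demo:

import voluptuous as vol

LOCATION_LOCAL = ".local"

def _convert_local_location(item: str | None) -> str | None:
    return None if item in {LOCATION_LOCAL, ""} else item

SCHEMA_LOCATION = vol.All(vol.Maybe(str), _convert_local_location)

assert SCHEMA_LOCATION(".local") is None      # alias for local storage
assert SCHEMA_LOCATION("") is None            # empty query value
assert SCHEMA_LOCATION("my_nas") == "my_nas"  # named mount passes through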
@@ -150,6 +146,16 @@ class APIBackups(CoreSysAttributes):
            raise APINotFound("Backup does not exist")
        return backup

    def _make_location_attributes(self, backup: Backup) -> dict[str, dict[str, Any]]:
        """Make location attributes dictionary."""
        return {
            loc if loc else LOCATION_LOCAL: {
                ATTR_PROTECTED: backup.all_locations[loc].protected,
                ATTR_SIZE_BYTES: backup.all_locations[loc].size_bytes,
            }
            for loc in backup.locations
        }

    def _list_backups(self):
        """Return list of backups."""
        return [
@@ -163,6 +169,7 @@ class APIBackups(CoreSysAttributes):
                ATTR_LOCATION: backup.location,
                ATTR_LOCATIONS: backup.locations,
                ATTR_PROTECTED: backup.protected,
                ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
                ATTR_COMPRESSED: backup.compressed,
                ATTR_CONTENT: {
                    ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
@@ -175,7 +182,7 @@ class APIBackups(CoreSysAttributes):
        ]

    @api_process
    async def list(self, request):
    async def list_backups(self, request):
        """Return backup list."""
        data_backups = self._list_backups()

@@ -201,7 +208,7 @@ class APIBackups(CoreSysAttributes):
        if ATTR_DAYS_UNTIL_STALE in body:
            self.sys_backups.days_until_stale = body[ATTR_DAYS_UNTIL_STALE]

        self.sys_backups.save_data()
        await self.sys_backups.save_data()

    @api_process
    async def reload(self, _):
@@ -234,6 +241,7 @@ class APIBackups(CoreSysAttributes):
            ATTR_SIZE_BYTES: backup.size_bytes,
            ATTR_COMPRESSED: backup.compressed,
            ATTR_PROTECTED: backup.protected,
            ATTR_LOCATION_ATTRIBUTES: self._make_location_attributes(backup),
            ATTR_SUPERVISOR_VERSION: backup.supervisor_version,
            ATTR_HOMEASSISTANT: backup.homeassistant_version,
            ATTR_LOCATION: backup.location,
@@ -248,7 +256,7 @@ class APIBackups(CoreSysAttributes):
    def _location_to_mount(self, location: str | None) -> LOCATION_TYPE:
        """Convert a single location to a mount if possible."""
        if not location or location == LOCATION_CLOUD_BACKUP:
            return location
            return cast(LOCATION_TYPE, location)

        mount = self.sys_mounts.get(location)
        if mount.usage != MountUsage.BACKUP:
@@ -277,36 +285,6 @@ class APIBackups(CoreSysAttributes):
                f"Location {LOCATION_CLOUD_BACKUP} is only available for Home Assistant"
            )

    async def _background_backup_task(
        self, backup_method: Callable, *args, **kwargs
    ) -> tuple[asyncio.Task, str]:
        """Start backup task in background and return task and job ID."""
        event = asyncio.Event()
        job, backup_task = self.sys_jobs.schedule_job(
            backup_method, JobSchedulerOptions(), *args, **kwargs
        )

        async def release_on_freeze(new_state: CoreState):
            if new_state == CoreState.FREEZE:
                event.set()

        # Wait for system to get into freeze state before returning
        # If the backup fails validation it will raise before getting there
        listener = self.sys_bus.register_event(
            BusEvent.SUPERVISOR_STATE_CHANGE, release_on_freeze
        )
        try:
            await asyncio.wait(
                (
                    backup_task,
                    self.sys_create_task(event.wait()),
                ),
                return_when=asyncio.FIRST_COMPLETED,
            )
            return (backup_task, job.uuid)
        finally:
            self.sys_bus.remove_listener(listener)

    @api_process
    async def backup_full(self, request: web.Request):
        """Create full backup."""
@@ -325,8 +303,8 @@ class APIBackups(CoreSysAttributes):
            body[ATTR_ADDITIONAL_LOCATIONS] = locations

        background = body.pop(ATTR_BACKGROUND)
        backup_task, job_id = await self._background_backup_task(
            self.sys_backups.do_backup_full, **body
        backup_task, job_id = await background_task(
            self, self.sys_backups.do_backup_full, **body
        )

        if background and not backup_task.done():
@@ -361,8 +339,8 @@ class APIBackups(CoreSysAttributes):
            body[ATTR_ADDONS] = list(self.sys_addons.local)

        background = body.pop(ATTR_BACKGROUND)
        backup_task, job_id = await self._background_backup_task(
            self.sys_backups.do_backup_partial, **body
        backup_task, job_id = await background_task(
            self, self.sys_backups.do_backup_partial, **body
        )

        if background and not backup_task.done():
@@ -385,8 +363,8 @@ class APIBackups(CoreSysAttributes):
            request, body.get(ATTR_LOCATION, backup.location)
        )
        background = body.pop(ATTR_BACKGROUND)
        restore_task, job_id = await self._background_backup_task(
            self.sys_backups.do_restore_full, backup, **body
        restore_task, job_id = await background_task(
            self, self.sys_backups.do_restore_full, backup, **body
        )

        if background and not restore_task.done() or await restore_task:
@@ -405,8 +383,8 @@ class APIBackups(CoreSysAttributes):
            request, body.get(ATTR_LOCATION, backup.location)
        )
        background = body.pop(ATTR_BACKGROUND)
        restore_task, job_id = await self._background_backup_task(
            self.sys_backups.do_restore_partial, backup, **body
        restore_task, job_id = await background_task(
            self, self.sys_backups.do_restore_partial, backup, **body
        )

        if background and not restore_task.done() or await restore_task:
@@ -440,23 +418,35 @@ class APIBackups(CoreSysAttributes):
        else:
            self._validate_cloud_backup_location(request, backup.location)

        return self.sys_backups.remove(backup, locations=locations)
        await self.sys_backups.remove(backup, locations=locations)

    @api_process
    async def download(self, request: web.Request):
        """Download a backup file."""
        backup = self._extract_slug(request)
        # Query will give us '' for /backups, convert value to None
        location = request.query.get(ATTR_LOCATION, backup.location) or None
        location = _convert_local_location(
            request.query.get(ATTR_LOCATION, backup.location)
        )
        self._validate_cloud_backup_location(request, location)
        if location not in backup.all_locations:
            raise APIError(f"Backup {backup.slug} is not in location {location}")

        _LOGGER.info("Downloading backup %s", backup.slug)
        response = web.FileResponse(backup.all_locations[location])
        filename = backup.all_locations[location].path
        # If the file is missing, return 404 and trigger reload of location
        if not await self.sys_run_in_executor(filename.is_file):
            self.sys_create_task(self.sys_backups.reload(location))
            return web.Response(status=404)

        response = web.FileResponse(filename)
        response.content_type = CONTENT_TYPE_TAR

        download_filename = filename.name
        if download_filename == f"{backup.slug}.tar":
            download_filename = f"{RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
        response.headers[CONTENT_DISPOSITION] = (
            f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
            f"attachment; filename={download_filename}"
        )
        return response
@@ -465,49 +455,85 @@ class APIBackups(CoreSysAttributes):
|
||||
"""Upload a backup file."""
|
||||
location: LOCATION_TYPE = None
|
||||
locations: list[LOCATION_TYPE] | None = None
|
||||
tmp_path = self.sys_config.path_tmp
|
||||
|
||||
if ATTR_LOCATION in request.query:
|
||||
location_names: list[str] = request.query.getall(ATTR_LOCATION)
|
||||
self._validate_cloud_backup_location(request, location_names)
|
||||
location_names: list[str] = request.query.getall(ATTR_LOCATION, [])
|
||||
self._validate_cloud_backup_location(
|
||||
request, cast(list[str | None], location_names)
|
||||
)
|
||||
# Convert empty string to None if necessary
|
||||
locations = [
|
||||
self._location_to_mount(location) if location else None
|
||||
self._location_to_mount(location)
|
||||
if _convert_local_location(location)
|
||||
else None
|
||||
for location in location_names
|
||||
]
|
||||
location = locations.pop(0)
|
||||
|
||||
if location and location != LOCATION_CLOUD_BACKUP:
|
||||
tmp_path = location.local_where
|
||||
filename: str | None = None
|
||||
if ATTR_FILENAME in request.query:
|
||||
filename = request.query.get(ATTR_FILENAME)
|
||||
try:
|
||||
vol.Match(RE_BACKUP_FILENAME)(filename)
|
||||
except vol.Invalid as ex:
|
||||
raise APIError(humanize_error(filename, ex)) from None
|
||||
|
||||
with TemporaryDirectory(dir=tmp_path.as_posix()) as temp_dir:
|
||||
tar_file = Path(temp_dir, "backup.tar")
|
||||
tmp_path = await self.sys_backups.get_upload_path_for_location(location)
|
||||
temp_dir: TemporaryDirectory | None = None
|
||||
backup_file_stream: IOBase | None = None
|
||||
|
||||
def open_backup_file() -> Path:
|
||||
nonlocal temp_dir, backup_file_stream
|
||||
temp_dir = TemporaryDirectory(dir=tmp_path.as_posix())
|
||||
tar_file = Path(temp_dir.name, "upload.tar")
|
||||
backup_file_stream = tar_file.open("wb")
|
||||
return tar_file
|
||||
|
||||
def close_backup_file() -> None:
|
||||
if backup_file_stream:
|
||||
# Make sure it got closed, in case of exception. It is safe to
|
||||
# close the file stream twice.
|
||||
backup_file_stream.close()
|
||||
if temp_dir:
|
||||
temp_dir.cleanup()
|
||||
|
||||
try:
|
||||
reader = await request.multipart()
|
||||
contents = await reader.next()
|
||||
try:
|
||||
with tar_file.open("wb") as backup:
|
||||
while True:
|
||||
chunk = await contents.read_chunk()
|
||||
if not chunk:
|
||||
break
|
||||
backup.write(chunk)
|
||||
if not isinstance(contents, BodyPartReader):
|
||||
raise APIError("Improperly formatted upload, could not read backup")
|
||||
|
||||
except OSError as err:
|
||||
if err.errno == errno.EBADMSG and location in {
|
||||
LOCATION_CLOUD_BACKUP,
|
||||
None,
|
||||
}:
|
||||
self.sys_resolution.unhealthy = UnhealthyReason.OSERROR_BAD_MESSAGE
|
||||
_LOGGER.error("Can't write new backup file: %s", err)
|
||||
return False
|
||||
|
||||
except asyncio.CancelledError:
|
||||
return False
|
||||
tar_file = await self.sys_run_in_executor(open_backup_file)
|
||||
while chunk := await contents.read_chunk(size=2**16):
|
||||
await self.sys_run_in_executor(
|
||||
cast(IOBase, backup_file_stream).write, chunk
|
||||
)
|
||||
await self.sys_run_in_executor(cast(IOBase, backup_file_stream).close)
|
||||
|
||||
backup = await asyncio.shield(
|
||||
self.sys_backups.import_backup(
|
||||
tar_file, location=location, additional_locations=locations
|
||||
tar_file,
|
||||
filename,
|
||||
location=location,
|
||||
additional_locations=locations,
|
||||
)
|
||||
)
|
||||
except OSError as err:
|
||||
if err.errno == errno.EBADMSG and location in {
|
||||
LOCATION_CLOUD_BACKUP,
|
||||
None,
|
||||
}:
|
||||
self.sys_resolution.add_unhealthy_reason(
|
||||
UnhealthyReason.OSERROR_BAD_MESSAGE
|
||||
)
|
||||
_LOGGER.error("Can't write new backup file: %s", err)
|
||||
return False
|
||||
|
||||
except asyncio.CancelledError:
|
||||
return False
|
||||
|
||||
finally:
|
||||
await self.sys_run_in_executor(close_backup_file)
|
||||
|
||||
if backup:
|
||||
return {ATTR_SLUG: backup.slug}
|
||||
|
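For context, the upload hunk above moves all blocking file I/O off the event loop. A minimal, self-contained sketch of that pattern follows; it is not Supervisor code, and the names write_upload and fake_chunks are illustrative only:

import asyncio
from io import IOBase
from pathlib import Path
from tempfile import TemporaryDirectory


async def write_upload(chunks, tmp_dir: Path) -> Path:
    """Stream async byte chunks into tmp_dir/upload.tar via an executor."""
    loop = asyncio.get_running_loop()
    tar_file = tmp_dir / "upload.tar"
    # Opening the file is blocking, so do it off-loop too.
    stream: IOBase = await loop.run_in_executor(None, lambda: tar_file.open("wb"))
    try:
        async for chunk in chunks:
            # Each blocking write happens in the executor, one chunk at a time.
            await loop.run_in_executor(None, stream.write, chunk)
    finally:
        # Closing twice is safe, mirroring the close_backup_file helper above.
        await loop.run_in_executor(None, stream.close)
    return tar_file


async def main() -> None:
    async def fake_chunks():
        for part in (b"a" * 65536, b"b" * 65536):
            yield part

    with TemporaryDirectory() as tmp:
        path = await write_upload(fake_chunks(), Path(tmp))
        print(path, path.stat().st_size)


asyncio.run(main())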
@@ -47,7 +47,9 @@ ATTR_JOBS = "jobs"
 ATTR_LLMNR = "llmnr"
 ATTR_LLMNR_HOSTNAME = "llmnr_hostname"
 ATTR_LOCAL_ONLY = "local_only"
+ATTR_LOCATION_ATTRIBUTES = "location_attributes"
 ATTR_LOCATIONS = "locations"
+ATTR_MAX_DEPTH = "max_depth"
 ATTR_MDNS = "mdns"
 ATTR_MODEL = "model"
 ATTR_MOUNTS = "mounts"
@@ -59,7 +61,6 @@ ATTR_REVISION = "revision"
 ATTR_SAFE_MODE = "safe_mode"
 ATTR_SEAT = "seat"
 ATTR_SIGNED = "signed"
-ATTR_SIZE_BYTES = "size_bytes"
 ATTR_STARTUP_TIME = "startup_time"
 ATTR_STATUS = "status"
 ATTR_SUBSYSTEM = "subsystem"
@@ -80,3 +81,11 @@ class BootSlot(StrEnum):

     A = "A"
     B = "B"
+
+
+class DetectBlockingIO(StrEnum):
+    """Enable/Disable detection for blocking I/O in event loop."""
+
+    OFF = "off"
+    ON = "on"
+    ON_AT_STARTUP = "on-at-startup"
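A quick usage sketch (not from the diff) of why StrEnum suits an option like DetectBlockingIO: members compare equal to their string values, so raw config strings can be coerced and compared directly. Requires Python 3.11+:

from enum import StrEnum


class DetectBlockingIO(StrEnum):
    """Enable/Disable detection for blocking I/O in event loop."""

    OFF = "off"
    ON = "on"
    ON_AT_STARTUP = "on-at-startup"


mode = DetectBlockingIO("on-at-startup")   # coerce a raw option string
assert mode == "on-at-startup"             # plain string comparison works
assert mode is DetectBlockingIO.ON_AT_STARTUP
print(mode.value)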
@@ -1,7 +1,9 @@
 """Init file for Supervisor network RESTful API."""

 import logging
+from typing import Any

+from aiohttp import web
 import voluptuous as vol

 from ..addons.addon import Addon
@@ -16,6 +18,7 @@ from ..const import (
     AddonState,
 )
 from ..coresys import CoreSysAttributes
+from ..discovery import Message
 from ..exceptions import APIForbidden, APINotFound
 from .utils import api_process, api_validate, require_home_assistant

@@ -32,16 +35,16 @@ SCHEMA_DISCOVERY = vol.Schema(
 class APIDiscovery(CoreSysAttributes):
     """Handle RESTful API for discovery functions."""

-    def _extract_message(self, request):
+    def _extract_message(self, request: web.Request) -> Message:
         """Extract discovery message from URL."""
-        message = self.sys_discovery.get(request.match_info.get("uuid"))
+        message = self.sys_discovery.get(request.match_info["uuid"])
         if not message:
             raise APINotFound("Discovery message not found")
         return message

     @api_process
     @require_home_assistant
-    async def list(self, request):
+    async def list_discovery(self, request: web.Request) -> dict[str, Any]:
         """Show registered and available services."""
         # Get available discovery
         discovery = [
@@ -52,12 +55,16 @@ class APIDiscovery(CoreSysAttributes):
                 ATTR_CONFIG: message.config,
             }
             for message in self.sys_discovery.list_messages
-            if (addon := self.sys_addons.get(message.addon, local_only=True))
-            and addon.state == AddonState.STARTED
+            if (
+                discovered := self.sys_addons.get_local_only(
+                    message.addon,
+                )
+            )
+            and discovered.state == AddonState.STARTED
         ]

         # Get available services/add-ons
-        services = {}
+        services: dict[str, list[str]] = {}
         for addon in self.sys_addons.all:
             for name in addon.discovery:
                 services.setdefault(name, []).append(addon.slug)
@@ -65,7 +72,7 @@ class APIDiscovery(CoreSysAttributes):
         return {ATTR_DISCOVERY: discovery, ATTR_SERVICES: services}

     @api_process
-    async def set_discovery(self, request):
+    async def set_discovery(self, request: web.Request) -> dict[str, str]:
         """Write data into a discovery pipeline."""
         body = await api_validate(SCHEMA_DISCOVERY, request)
         addon: Addon = request[REQUEST_FROM]
@@ -83,13 +90,13 @@ class APIDiscovery(CoreSysAttributes):
         )

         # Process discovery message
-        message = self.sys_discovery.send(addon, **body)
+        message = await self.sys_discovery.send(addon, **body)

         return {ATTR_UUID: message.uuid}

     @api_process
     @require_home_assistant
-    async def get_discovery(self, request):
+    async def get_discovery(self, request: web.Request) -> dict[str, Any]:
         """Read data into a discovery message."""
         message = self._extract_message(request)

@@ -101,7 +108,7 @@ class APIDiscovery(CoreSysAttributes):
         }

     @api_process
-    async def del_discovery(self, request):
+    async def del_discovery(self, request: web.Request) -> None:
         """Delete data into a discovery message."""
         message = self._extract_message(request)
         addon = request[REQUEST_FROM]
@@ -110,5 +117,4 @@ class APIDiscovery(CoreSysAttributes):
         if message.addon != addon.slug:
             raise APIForbidden("Can't remove discovery message")

-        self.sys_discovery.remove(message)
-        return True
+        await self.sys_discovery.remove(message)
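The list_discovery comprehension above filters with a walrus assignment. A stand-alone sketch of that pattern, using stand-in dataclasses rather than the Supervisor Message/Addon types:

from dataclasses import dataclass


@dataclass
class Addon:
    slug: str
    state: str


@dataclass
class Message:
    addon: str
    service: str


installed = {"core_mosquitto": Addon("core_mosquitto", "started")}
messages = [Message("core_mosquitto", "mqtt"), Message("gone_addon", "mqtt")]

discovery = [
    {"addon": msg.addon, "service": msg.service}
    for msg in messages
    # The walrus keeps the lookup result so the state check reuses it.
    if (addon := installed.get(msg.addon)) and addon.state == "started"
]
print(discovery)  # only the message whose add-on exists and is started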
@@ -78,7 +78,7 @@ class APICoreDNS(CoreSysAttributes):
         if restart_required:
             self.sys_create_task(self.sys_plugins.dns.restart())

-        self.sys_plugins.dns.save_data()
+        await self.sys_plugins.dns.save_data()

     @api_process
     async def stats(self, request: web.Request) -> dict[str, Any]:
@@ -6,9 +6,13 @@ from typing import Any
 from aiohttp import web
 import voluptuous as vol

+from supervisor.resolution.const import ContextType, IssueType, SuggestionType
+
 from ..const import (
+    ATTR_ENABLE_IPV6,
     ATTR_HOSTNAME,
     ATTR_LOGGING,
+    ATTR_MTU,
     ATTR_PASSWORD,
     ATTR_REGISTRIES,
     ATTR_STORAGE,
@@ -30,10 +34,65 @@ SCHEMA_DOCKER_REGISTRY = vol.Schema(
     }
 )

+# pylint: disable=no-value-for-parameter
+SCHEMA_OPTIONS = vol.Schema(
+    {
+        vol.Optional(ATTR_ENABLE_IPV6): vol.Maybe(vol.Boolean()),
+        vol.Optional(ATTR_MTU): vol.Maybe(vol.All(int, vol.Range(min=68, max=65535))),
+    }
+)
+

 class APIDocker(CoreSysAttributes):
     """Handle RESTful API for Docker configuration."""

     @api_process
+    async def info(self, request: web.Request):
+        """Get docker info."""
+        data_registries = {}
+        for hostname, registry in self.sys_docker.config.registries.items():
+            data_registries[hostname] = {
+                ATTR_USERNAME: registry[ATTR_USERNAME],
+            }
+        return {
+            ATTR_VERSION: self.sys_docker.info.version,
+            ATTR_ENABLE_IPV6: self.sys_docker.config.enable_ipv6,
+            ATTR_MTU: self.sys_docker.config.mtu,
+            ATTR_STORAGE: self.sys_docker.info.storage,
+            ATTR_LOGGING: self.sys_docker.info.logging,
+            ATTR_REGISTRIES: data_registries,
+        }
+
+    @api_process
+    async def options(self, request: web.Request) -> None:
+        """Set docker options."""
+        body = await api_validate(SCHEMA_OPTIONS, request)
+
+        reboot_required = False
+
+        if (
+            ATTR_ENABLE_IPV6 in body
+            and self.sys_docker.config.enable_ipv6 != body[ATTR_ENABLE_IPV6]
+        ):
+            self.sys_docker.config.enable_ipv6 = body[ATTR_ENABLE_IPV6]
+            reboot_required = True
+
+        if ATTR_MTU in body and self.sys_docker.config.mtu != body[ATTR_MTU]:
+            self.sys_docker.config.mtu = body[ATTR_MTU]
+            reboot_required = True
+
+        if reboot_required:
+            _LOGGER.info(
+                "Host system reboot required to apply Docker configuration changes"
+            )
+            self.sys_resolution.create_issue(
+                IssueType.REBOOT_REQUIRED,
+                ContextType.SYSTEM,
+                suggestions=[SuggestionType.EXECUTE_REBOOT],
+            )
+
+        await self.sys_docker.config.save_data()
+
+    @api_process
     async def registries(self, request) -> dict[str, Any]:
         """Return the list of registries."""
@@ -53,7 +112,7 @@ class APIDocker(CoreSysAttributes):
         for hostname, registry in body.items():
             self.sys_docker.config.registries[hostname] = registry

-        self.sys_docker.config.save_data()
+        await self.sys_docker.config.save_data()

     @api_process
     async def remove_registry(self, request: web.Request):
@@ -63,19 +122,4 @@ class APIDocker(CoreSysAttributes):
             raise APINotFound(f"Hostname {hostname} does not exist in registries")

         del self.sys_docker.config.registries[hostname]
-        self.sys_docker.config.save_data()
-
-    @api_process
-    async def info(self, request: web.Request):
-        """Get docker info."""
-        data_registries = {}
-        for hostname, registry in self.sys_docker.config.registries.items():
-            data_registries[hostname] = {
-                ATTR_USERNAME: registry[ATTR_USERNAME],
-            }
-        return {
-            ATTR_VERSION: self.sys_docker.info.version,
-            ATTR_STORAGE: self.sys_docker.info.storage,
-            ATTR_LOGGING: self.sys_docker.info.logging,
-            ATTR_REGISTRIES: data_registries,
-        }
+        await self.sys_docker.config.save_data()
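The options handler above only flags a reboot when a value actually changed. A minimal sketch of that change-detection pattern, with stand-in Config/Resolution classes instead of Supervisor's managers:

from dataclasses import dataclass, field


@dataclass
class Config:
    enable_ipv6: bool | None = None
    mtu: int | None = None


@dataclass
class Resolution:
    issues: list[str] = field(default_factory=list)

    def create_issue(self, issue: str) -> None:
        self.issues.append(issue)


def apply_options(config: Config, resolution: Resolution, body: dict) -> bool:
    """Apply settings; record one reboot issue only if something changed."""
    reboot_required = False
    if "enable_ipv6" in body and config.enable_ipv6 != body["enable_ipv6"]:
        config.enable_ipv6 = body["enable_ipv6"]
        reboot_required = True
    if "mtu" in body and config.mtu != body["mtu"]:
        config.mtu = body["mtu"]
        reboot_required = True
    if reboot_required:
        resolution.create_issue("reboot_required")
    return reboot_required


config, resolution = Config(), Resolution()
print(apply_options(config, resolution, {"enable_ipv6": True, "mtu": 1500}))  # True
print(apply_options(config, resolution, {"mtu": 1500}))                       # False, unchanged
print(resolution.issues)                                                      # one issue only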
@@ -68,7 +68,10 @@ def filesystem_struct(fs_block: UDisks2Block) -> dict[str, Any]:
         ATTR_NAME: fs_block.id_label,
         ATTR_SYSTEM: fs_block.hint_system,
         ATTR_MOUNT_POINTS: [
-            str(mount_point) for mount_point in fs_block.filesystem.mount_points
+            str(mount_point)
+            for mount_point in (
+                fs_block.filesystem.mount_points if fs_block.filesystem else []
+            )
         ],
     }
@@ -20,6 +20,7 @@ from ..const import (
     ATTR_CPU_PERCENT,
     ATTR_IMAGE,
     ATTR_IP_ADDRESS,
+    ATTR_JOB_ID,
     ATTR_MACHINE,
     ATTR_MEMORY_LIMIT,
     ATTR_MEMORY_PERCENT,
@@ -37,8 +38,8 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIDBMigrationInProgress, APIError
 from ..validate import docker_image, network_port, version_tag
-from .const import ATTR_FORCE, ATTR_SAFE_MODE
-from .utils import api_process, api_validate
+from .const import ATTR_BACKGROUND, ATTR_FORCE, ATTR_SAFE_MODE
+from .utils import api_process, api_validate, background_task

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -61,6 +62,7 @@ SCHEMA_UPDATE = vol.Schema(
     {
         vol.Optional(ATTR_VERSION): version_tag,
        vol.Optional(ATTR_BACKUP): bool,
+        vol.Optional(ATTR_BACKGROUND, default=False): bool,
     }
 )

@@ -118,7 +120,7 @@ class APIHomeAssistant(CoreSysAttributes):
         body = await api_validate(SCHEMA_OPTIONS, request)

         if ATTR_IMAGE in body:
-            self.sys_homeassistant.image = body[ATTR_IMAGE]
+            self.sys_homeassistant.set_image(body[ATTR_IMAGE])
             self.sys_homeassistant.override_image = (
                 self.sys_homeassistant.image != self.sys_homeassistant.default_image
             )
@@ -149,7 +151,7 @@ class APIHomeAssistant(CoreSysAttributes):
                 ATTR_BACKUPS_EXCLUDE_DATABASE
             ]

-        self.sys_homeassistant.save_data()
+        await self.sys_homeassistant.save_data()

     @api_process
     async def stats(self, request: web.Request) -> dict[Any, str]:
@@ -170,18 +172,24 @@ class APIHomeAssistant(CoreSysAttributes):
         }

     @api_process
-    async def update(self, request: web.Request) -> None:
+    async def update(self, request: web.Request) -> dict[str, str] | None:
         """Update Home Assistant."""
         body = await api_validate(SCHEMA_UPDATE, request)
         await self._check_offline_migration()

-        await asyncio.shield(
-            self.sys_homeassistant.core.update(
-                version=body.get(ATTR_VERSION, self.sys_homeassistant.latest_version),
-                backup=body.get(ATTR_BACKUP),
-            )
+        background = body[ATTR_BACKGROUND]
+        update_task, job_id = await background_task(
+            self,
+            self.sys_homeassistant.core.update,
+            version=body.get(ATTR_VERSION, self.sys_homeassistant.latest_version),
+            backup=body.get(ATTR_BACKUP),
         )
+
+        if background and not update_task.done():
+            return {ATTR_JOB_ID: job_id}
+
+        return await update_task

     @api_process
     async def stop(self, request: web.Request) -> Awaitable[None]:
         """Stop Home Assistant."""
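The update handler above adopts the background-task contract used throughout this compare: start the work, then either return a job reference immediately or await the result inline. A simplified stand-alone sketch; background_task here is a toy stand-in for the Supervisor helper and the job id wiring:

import asyncio
import uuid


async def background_task(coro_fn, *args) -> tuple[asyncio.Task, str]:
    """Start work and hand back the task plus an opaque job id."""
    task = asyncio.create_task(coro_fn(*args))
    await asyncio.sleep(0)  # yield once so fast operations can finish first
    return task, uuid.uuid4().hex


async def update(version: str) -> dict:
    await asyncio.sleep(0.1)  # pretend the update takes a moment
    return {"version": version}


async def handler(background: bool) -> dict:
    task, job_id = await background_task(update, "2025.1.0")
    if background and not task.done():
        return {"job_id": job_id}  # caller polls the job API for progress
    return await task


async def main() -> None:
    print(await handler(background=False))  # waits for the result inline
    print(await handler(background=True))   # returns a job id immediately
    await asyncio.sleep(0.2)                # let the background task finish


asyncio.run(main())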
@@ -2,9 +2,17 @@

 import asyncio
 from contextlib import suppress
+import json
 import logging
+from typing import Any

-from aiohttp import ClientConnectionResetError, web
+from aiohttp import (
+    ClientConnectionResetError,
+    ClientError,
+    ClientPayloadError,
+    ClientTimeout,
+    web,
+)
 from aiohttp.hdrs import ACCEPT, RANGE
 import voluptuous as vol
 from voluptuous.error import CoerceInvalid
@@ -36,6 +44,7 @@ from ..host.const import (
     LogFormat,
     LogFormatter,
 )
+from ..host.logs import SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX
 from ..utils.systemd_journal import journal_logs_reader
 from .const import (
     ATTR_AGENT_VERSION,
@@ -49,6 +58,7 @@ from .const import (
     ATTR_FORCE,
     ATTR_IDENTIFIERS,
     ATTR_LLMNR_HOSTNAME,
+    ATTR_MAX_DEPTH,
     ATTR_STARTUP_TIME,
     ATTR_USE_NTP,
     ATTR_VIRTUALIZATION,
@@ -98,10 +108,10 @@ class APIHost(CoreSysAttributes):
             ATTR_VIRTUALIZATION: self.sys_host.info.virtualization,
             ATTR_CPE: self.sys_host.info.cpe,
             ATTR_DEPLOYMENT: self.sys_host.info.deployment,
-            ATTR_DISK_FREE: self.sys_host.info.free_space,
-            ATTR_DISK_TOTAL: self.sys_host.info.total_space,
-            ATTR_DISK_USED: self.sys_host.info.used_space,
-            ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
+            ATTR_DISK_FREE: await self.sys_host.info.free_space(),
+            ATTR_DISK_TOTAL: await self.sys_host.info.total_space(),
+            ATTR_DISK_USED: await self.sys_host.info.used_space(),
+            ATTR_DISK_LIFE_TIME: await self.sys_host.info.disk_life_time(),
             ATTR_FEATURES: self.sys_host.features,
             ATTR_HOSTNAME: self.sys_host.info.hostname,
             ATTR_LLMNR_HOSTNAME: self.sys_host.info.llmnr_hostname,
@@ -191,27 +201,43 @@ class APIHost(CoreSysAttributes):
         return possible_offset

     async def advanced_logs_handler(
-        self, request: web.Request, identifier: str | None = None, follow: bool = False
+        self,
+        request: web.Request,
+        identifier: str | None = None,
+        follow: bool = False,
+        latest: bool = False,
     ) -> web.StreamResponse:
         """Return systemd-journald logs."""
         log_formatter = LogFormatter.PLAIN
-        params = {}
+        params: dict[str, Any] = {}
         if identifier:
             params[PARAM_SYSLOG_IDENTIFIER] = identifier
         elif IDENTIFIER in request.match_info:
-            params[PARAM_SYSLOG_IDENTIFIER] = request.match_info.get(IDENTIFIER)
+            params[PARAM_SYSLOG_IDENTIFIER] = request.match_info[IDENTIFIER]
         else:
             params[PARAM_SYSLOG_IDENTIFIER] = self.sys_host.logs.default_identifiers
             # host logs should be always verbose, no matter what Accept header is used
             log_formatter = LogFormatter.VERBOSE

         if BOOTID in request.match_info:
-            params[PARAM_BOOT_ID] = await self._get_boot_id(
-                request.match_info.get(BOOTID)
-            )
+            params[PARAM_BOOT_ID] = await self._get_boot_id(request.match_info[BOOTID])
         if follow:
             params[PARAM_FOLLOW] = ""

+        if latest:
+            if not identifier:
+                raise APIError(
+                    "Latest logs can only be fetched for a specific identifier."
+                )
+
+            try:
+                epoch = await self._get_container_last_epoch(identifier)
+                params["CONTAINER_LOG_EPOCH"] = epoch
+            except HostLogError as err:
+                raise APIError(
+                    f"Cannot determine CONTAINER_LOG_EPOCH of {identifier}, latest logs not available."
+                ) from err
+
         if ACCEPT in request.headers and request.headers[ACCEPT] not in [
             CONTENT_TYPE_TEXT,
             CONTENT_TYPE_X_LOG,
@@ -239,13 +265,13 @@ class APIHost(CoreSysAttributes):
             # return 2 lines at minimum.
             lines = max(2, lines)
             # entries=cursor[[:num_skip]:num_entries]
-            range_header = f"entries=:-{lines-1}:{'' if follow else lines}"
+            range_header = f"entries=:-{lines - 1}:{SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX if follow else lines}"
+        elif latest:
+            range_header = f"entries=:0:{SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX}"
         elif RANGE in request.headers:
-            range_header = request.headers.get(RANGE)
+            range_header = request.headers[RANGE]
         else:
-            range_header = (
-                f"entries=:-{DEFAULT_LINES-1}:{'' if follow else DEFAULT_LINES}"
-            )
+            range_header = f"entries=:-{DEFAULT_LINES - 1}:{SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX if follow else DEFAULT_LINES}"

         async with self.sys_host.logs.journald_logs(
             params=params, range_header=range_header, accept=LogFormat.JOURNAL
@@ -255,16 +281,31 @@ class APIHost(CoreSysAttributes):
             response.content_type = CONTENT_TYPE_TEXT
             headers_returned = False
             async for cursor, line in journal_logs_reader(resp, log_formatter):
-                if not headers_returned:
-                    if cursor:
-                        response.headers["X-First-Cursor"] = cursor
-                    await response.prepare(request)
-                    headers_returned = True
-                # When client closes the connection while reading busy logs, we
-                # sometimes get this exception. It should be safe to ignore it.
-                with suppress(ClientConnectionResetError):
+                try:
+                    if not headers_returned:
+                        if cursor:
+                            response.headers["X-First-Cursor"] = cursor
+                        response.headers["X-Accel-Buffering"] = "no"
+                        await response.prepare(request)
+                        headers_returned = True
                     await response.write(line.encode("utf-8") + b"\n")
-        except ConnectionResetError as ex:
+                except ClientConnectionResetError as err:
+                    # When client closes the connection while reading busy logs, we
+                    # sometimes get this exception. It should be safe to ignore it.
+                    _LOGGER.debug(
+                        "ClientConnectionResetError raised when returning journal logs: %s",
+                        err,
+                    )
+                    break
+                except ConnectionError as err:
+                    _LOGGER.warning(
+                        "%s raised when returning journal logs: %s",
+                        type(err).__name__,
+                        err,
+                    )
+                    break
+        except (ConnectionResetError, ClientPayloadError) as ex:
+            # ClientPayloadError is most likely caused by the closing the connection
             raise APIError(
                 "Connection reset when trying to fetch data from systemd-journald."
             ) from ex
@@ -272,7 +313,81 @@ class APIHost(CoreSysAttributes):

     @api_process_raw(CONTENT_TYPE_TEXT, error_type=CONTENT_TYPE_TEXT)
     async def advanced_logs(
-        self, request: web.Request, identifier: str | None = None, follow: bool = False
+        self,
+        request: web.Request,
+        identifier: str | None = None,
+        follow: bool = False,
+        latest: bool = False,
     ) -> web.StreamResponse:
         """Return systemd-journald logs. Wrapped as standard API handler."""
-        return await self.advanced_logs_handler(request, identifier, follow)
+        return await self.advanced_logs_handler(request, identifier, follow, latest)
+
+    @api_process
+    async def disk_usage(self, request: web.Request) -> dict:
+        """Return a breakdown of storage usage for the system."""
+
+        max_depth = request.query.get(ATTR_MAX_DEPTH, 1)
+        try:
+            max_depth = int(max_depth)
+        except ValueError:
+            max_depth = 1
+
+        disk = self.sys_hardware.disk
+
+        total, used, _ = await self.sys_run_in_executor(
+            disk.disk_usage, self.sys_config.path_supervisor
+        )
+
+        known_paths = await self.sys_run_in_executor(
+            disk.get_dir_sizes,
+            {
+                "addons_data": self.sys_config.path_addons_data,
+                "addons_config": self.sys_config.path_addon_configs,
+                "media": self.sys_config.path_media,
+                "share": self.sys_config.path_share,
+                "backup": self.sys_config.path_backup,
+                "ssl": self.sys_config.path_ssl,
+                "homeassistant": self.sys_config.path_homeassistant,
+            },
+            max_depth,
+        )
+        return {
+            # this can be the disk/partition ID in the future
+            "id": "root",
+            "label": "Root",
+            "total_bytes": total,
+            "used_bytes": used,
+            "children": [
+                {
+                    "id": "system",
+                    "label": "System",
+                    "used_bytes": used
+                    - sum(path["used_bytes"] for path in known_paths),
+                },
+                *known_paths,
+            ],
+        }
+
+    async def _get_container_last_epoch(self, identifier: str) -> str | None:
+        """Get Docker's internal log epoch of the latest log entry for the given identifier."""
+        try:
+            async with self.sys_host.logs.journald_logs(
+                params={"CONTAINER_NAME": identifier},
+                range_header="entries=:-1:2",  # -1 = next to the last entry
+                accept=LogFormat.JSON,
+                timeout=ClientTimeout(total=10),
+            ) as resp:
+                text = await resp.text()
+        except (ClientError, TimeoutError) as err:
+            raise HostLogError(
+                "Could not get last container epoch from systemd-journal-gatewayd",
+                _LOGGER.error,
+            ) from err
+
+        try:
+            return json.loads(text.strip().split("\n")[-1])["CONTAINER_LOG_EPOCH"]
+        except (json.JSONDecodeError, KeyError, IndexError) as err:
+            raise HostLogError(
+                f"Failed to parse CONTAINER_LOG_EPOCH of {identifier} container, got: {text}",
+                _LOGGER.error,
+            ) from err
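The Range-header arithmetic above follows systemd-journal-gatewayd's entries=cursor[[:num_skip]:num_entries] syntax: ":-N" skips back N entries from the end, so fetching "lines" lines means skipping lines - 1. A small sketch with a stand-in constant (the real SYSTEMD_JOURNAL_GATEWAYD_LINES_MAX value lives in supervisor.host.logs):

GATEWAYD_LINES_MAX = 100_000  # illustrative cap, not the real constant


def range_header(lines: int, follow: bool) -> str:
    """Build a gatewayd Range header for the last `lines` journal entries."""
    lines = max(2, lines)  # the journal may start mid-line; always fetch two or more
    num_entries = GATEWAYD_LINES_MAX if follow else lines
    return f"entries=:-{lines - 1}:{num_entries}"


print(range_header(100, follow=False))  # entries=:-99:100
print(range_header(100, follow=True))   # entries=:-99:100000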
@@ -83,7 +83,7 @@ class APIIngress(CoreSysAttributes):

     def _extract_addon(self, request: web.Request) -> Addon:
         """Return addon, throw an exception it it doesn't exist."""
-        token = request.match_info.get("token")
+        token = request.match_info["token"]

         # Find correct add-on
         addon = self.sys_ingress.get(token)
@@ -132,7 +132,7 @@ class APIIngress(CoreSysAttributes):

     @api_process
     @require_home_assistant
-    async def validate_session(self, request: web.Request) -> dict[str, Any]:
+    async def validate_session(self, request: web.Request) -> None:
         """Validate session and extending how long it's valid for."""
         data = await api_validate(VALIDATE_SESSION_DATA, request)

@@ -147,14 +147,14 @@ class APIIngress(CoreSysAttributes):
         """Route data to Supervisor ingress service."""

         # Check Ingress Session
-        session = request.cookies.get(COOKIE_INGRESS)
+        session = request.cookies.get(COOKIE_INGRESS, "")
         if not self.sys_ingress.validate_session(session):
             _LOGGER.warning("No valid ingress session %s", session)
             raise HTTPUnauthorized()

         # Process requests
         addon = self._extract_addon(request)
-        path = request.match_info.get("path")
+        path = request.match_info.get("path", "")
         session_data = self.sys_ingress.get_session_data(session)
         try:
             # Websocket
@@ -183,7 +183,7 @@ class APIIngress(CoreSysAttributes):
                 for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
             ]
         else:
-            req_protocols = ()
+            req_protocols = []

         ws_server = web.WebSocketResponse(
             protocols=req_protocols, autoclose=False, autoping=False
@@ -199,21 +199,25 @@ class APIIngress(CoreSysAttributes):
             url = f"{url}?{request.query_string}"

         # Start proxy
-        async with self.sys_websession.ws_connect(
-            url,
-            headers=source_header,
-            protocols=req_protocols,
-            autoclose=False,
-            autoping=False,
-        ) as ws_client:
-            # Proxy requests
-            await asyncio.wait(
-                [
-                    self.sys_create_task(_websocket_forward(ws_server, ws_client)),
-                    self.sys_create_task(_websocket_forward(ws_client, ws_server)),
-                ],
-                return_when=asyncio.FIRST_COMPLETED,
-            )
+        try:
+            _LOGGER.debug("Proxing WebSocket to %s, upstream url: %s", addon.slug, url)
+            async with self.sys_websession.ws_connect(
+                url,
+                headers=source_header,
+                protocols=req_protocols,
+                autoclose=False,
+                autoping=False,
+            ) as ws_client:
+                # Proxy requests
+                await asyncio.wait(
+                    [
+                        self.sys_create_task(_websocket_forward(ws_server, ws_client)),
+                        self.sys_create_task(_websocket_forward(ws_client, ws_server)),
+                    ],
+                    return_when=asyncio.FIRST_COMPLETED,
+                )
+        except TimeoutError:
+            _LOGGER.warning("WebSocket proxy to %s timed out", addon.slug)

         return ws_server
@@ -277,14 +281,16 @@ class APIIngress(CoreSysAttributes):
         response.content_type = content_type

         try:
+            response.headers["X-Accel-Buffering"] = "no"
             await response.prepare(request)
-            async for data in result.content.iter_chunked(4096):
+            async for data, _ in result.content.iter_chunks():
                 await response.write(data)

         except (
             aiohttp.ClientError,
             aiohttp.ClientPayloadError,
-            ConnectionResetError,
+            ConnectionError,
         ) as err:
             _LOGGER.error("Stream error with %s: %s", url, err)

@@ -308,9 +314,9 @@ class APIIngress(CoreSysAttributes):

 def _init_header(
     request: web.Request, addon: Addon, session_data: IngressSessionData | None
-) -> CIMultiDict | dict[str, str]:
+) -> CIMultiDict[str]:
     """Create initial header."""
-    headers = {}
+    headers = CIMultiDict[str]()

     if session_data is not None:
         headers[HEADER_REMOTE_USER_ID] = session_data.user.id
@@ -336,19 +342,20 @@ def _init_header(
             istr(HEADER_REMOTE_USER_DISPLAY_NAME),
         ):
             continue
-        headers[name] = value
+        headers.add(name, value)

     # Update X-Forwarded-For
-    forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
-    connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
-    headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"
+    if request.transport:
+        forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
+        connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
+        headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"

     return headers


-def _response_header(response: aiohttp.ClientResponse) -> dict[str, str]:
+def _response_header(response: aiohttp.ClientResponse) -> CIMultiDict[str]:
     """Create response header."""
-    headers = {}
+    headers = CIMultiDict[str]()

     for name, value in response.headers.items():
         if name in (
@@ -358,7 +365,7 @@ def _response_header(response: aiohttp.ClientResponse) -> dict[str, str]:
             hdrs.CONTENT_ENCODING,
         ):
             continue
-        headers[name] = value
+        headers.add(name, value)

     return headers

@@ -384,9 +391,9 @@ async def _websocket_forward(ws_from, ws_to):
         elif msg.type == aiohttp.WSMsgType.BINARY:
             await ws_to.send_bytes(msg.data)
         elif msg.type == aiohttp.WSMsgType.PING:
-            await ws_to.ping()
+            await ws_to.ping(msg.data)
         elif msg.type == aiohttp.WSMsgType.PONG:
-            await ws_to.pong()
+            await ws_to.pong(msg.data)
         elif ws_to.closed:
             await ws_to.close(code=ws_to.close_code, message=msg.extra)
     except RuntimeError:
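The switch from headers[name] = value to headers.add(name, value) matters because, with multidict's CIMultiDict (an aiohttp dependency), item assignment replaces every existing value for a key while add() appends, preserving repeated headers such as multiple Set-Cookie lines. A quick demonstration:

from multidict import CIMultiDict

headers = CIMultiDict[str]()
headers["Set-Cookie"] = "a=1"
headers["Set-Cookie"] = "b=2"        # assignment replaces; only b=2 survives
print(headers.getall("Set-Cookie"))  # ['b=2']

headers = CIMultiDict[str]()
headers.add("Set-Cookie", "a=1")
headers.add("Set-Cookie", "b=2")     # add appends; both survive
print(headers.getall("Set-Cookie"))  # ['a=1', 'b=2']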
@@ -26,14 +26,21 @@ class APIJobs(CoreSysAttributes):
     def _extract_job(self, request: web.Request) -> SupervisorJob:
         """Extract job from request or raise."""
         try:
-            return self.sys_jobs.get_job(request.match_info.get("uuid"))
+            return self.sys_jobs.get_job(request.match_info["uuid"])
         except JobNotFound:
             raise APINotFound("Job does not exist") from None

     def _list_jobs(self, start: SupervisorJob | None = None) -> list[dict[str, Any]]:
-        """Return current job tree."""
+        """Return current job tree.
+
+        Jobs are added to cache as they are created so by default they are in oldest to newest.
+        This is correct ordering for child jobs as it makes logical sense to present those in
+        the order they occurred within the parent. For the list as a whole, sort from newest
+        to oldest as its likely any client is most interested in the newer ones.
+        """
+        # Initially sort oldest to newest so all child lists end up in correct order
         jobs_by_parent: dict[str | None, list[SupervisorJob]] = {}
-        for job in self.sys_jobs.jobs:
+        for job in sorted(self.sys_jobs.jobs):
             if job.internal:
                 continue
@@ -42,11 +49,15 @@ class APIJobs(CoreSysAttributes):
             else:
                 jobs_by_parent[job.parent_id].append(job)

+        # After parent-child organization, sort the root jobs only from newest to oldest
         job_list: list[dict[str, Any]] = []
         queue: list[tuple[list[dict[str, Any]], SupervisorJob]] = (
             [(job_list, start)]
             if start
-            else [(job_list, job) for job in jobs_by_parent.get(None, [])]
+            else [
+                (job_list, job)
+                for job in sorted(jobs_by_parent.get(None, []), reverse=True)
+            ]
         )

         while queue:
@@ -60,7 +71,10 @@ class APIJobs(CoreSysAttributes):

             if current_job.uuid in jobs_by_parent:
                 queue.extend(
-                    [(child_jobs, job) for job in jobs_by_parent.get(current_job.uuid)]
+                    [
+                        (child_jobs, job)
+                        for job in jobs_by_parent.get(current_job.uuid, [])
+                    ]
                 )

         return job_list
@@ -81,14 +95,14 @@ class APIJobs(CoreSysAttributes):
         if ATTR_IGNORE_CONDITIONS in body:
             self.sys_jobs.ignore_conditions = body[ATTR_IGNORE_CONDITIONS]

-        self.sys_jobs.save_data()
+        await self.sys_jobs.save_data()

         await self.sys_resolution.evaluate.evaluate_system()

     @api_process
     async def reset(self, request: web.Request) -> None:
         """Reset options for JobManager."""
-        self.sys_jobs.reset_data()
+        await self.sys_jobs.reset_data()

     @api_process
     async def job_info(self, request: web.Request) -> dict[str, Any]:
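The _list_jobs ordering above (children oldest-to-newest, roots newest-first) can be sketched in isolation. Job below is a simplified stand-in for SupervisorJob, ordered by a creation counter:

from dataclasses import dataclass, field


@dataclass(order=True)
class Job:
    created: int                                  # the only comparison key
    uuid: str = field(compare=False)
    parent_id: str | None = field(compare=False)


jobs = [
    Job(1, "a", None), Job(2, "a1", "a"),
    Job(3, "b", None), Job(4, "b1", "b"),
]

by_parent: dict[str | None, list[Job]] = {}
for job in sorted(jobs):                          # oldest to newest for children
    by_parent.setdefault(job.parent_id, []).append(job)

roots = sorted(by_parent.get(None, []), reverse=True)  # newest root first


def render(job: Job) -> dict:
    return {
        "uuid": job.uuid,
        "child_jobs": [render(child) for child in by_parent.get(job.uuid, [])],
    }


print([render(job) for job in roots])  # "b" (with b1) listed before "a" (with a1)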
@@ -1,11 +1,12 @@
 """Handle security part of this API."""

+from collections.abc import Callable
 import logging
 import re
 from typing import Final
 from urllib.parse import unquote

-from aiohttp.web import Request, RequestHandler, Response, middleware
+from aiohttp.web import Request, Response, middleware
 from aiohttp.web_exceptions import HTTPBadRequest, HTTPForbidden, HTTPUnauthorized
 from awesomeversion import AwesomeVersion

@@ -19,11 +20,11 @@ from ...const import (
     ROLE_DEFAULT,
     ROLE_HOMEASSISTANT,
     ROLE_MANAGER,
-    CoreState,
+    VALID_API_STATES,
 )
 from ...coresys import CoreSys, CoreSysAttributes
 from ...utils import version_is_new_enough
-from ..utils import api_return_error, excract_supervisor_token
+from ..utils import api_return_error, extract_supervisor_token

 _LOGGER: logging.Logger = logging.getLogger(__name__)
 _CORE_VERSION: Final = AwesomeVersion("2023.3.4")
@@ -179,9 +180,7 @@ class SecurityMiddleware(CoreSysAttributes):
         return unquoted

     @middleware
-    async def block_bad_requests(
-        self, request: Request, handler: RequestHandler
-    ) -> Response:
+    async def block_bad_requests(self, request: Request, handler: Callable) -> Response:
         """Process request and tblock commonly known exploit attempts."""
         if FILTERS.search(self._recursive_unquote(request.path)):
             _LOGGER.warning(
@@ -199,15 +198,9 @@ class SecurityMiddleware(CoreSysAttributes):
         return await handler(request)

     @middleware
-    async def system_validation(
-        self, request: Request, handler: RequestHandler
-    ) -> Response:
+    async def system_validation(self, request: Request, handler: Callable) -> Response:
         """Check if core is ready to response."""
-        if self.sys_core.state not in (
-            CoreState.STARTUP,
-            CoreState.RUNNING,
-            CoreState.FREEZE,
-        ):
+        if self.sys_core.state not in VALID_API_STATES:
             return api_return_error(
                 message=f"System is not ready with state: {self.sys_core.state}"
             )
@@ -215,12 +208,10 @@ class SecurityMiddleware(CoreSysAttributes):
         return await handler(request)

     @middleware
-    async def token_validation(
-        self, request: Request, handler: RequestHandler
-    ) -> Response:
+    async def token_validation(self, request: Request, handler: Callable) -> Response:
         """Check security access of this layer."""
-        request_from = None
-        supervisor_token = excract_supervisor_token(request)
+        request_from: CoreSysAttributes | None = None
+        supervisor_token = extract_supervisor_token(request)

         # Blacklist
         if BLACKLIST.match(request.path):
@@ -288,7 +279,7 @@ class SecurityMiddleware(CoreSysAttributes):
             raise HTTPForbidden()

     @middleware
-    async def core_proxy(self, request: Request, handler: RequestHandler) -> Response:
+    async def core_proxy(self, request: Request, handler: Callable) -> Response:
         """Validate user from Core API proxy."""
         if (
             request[REQUEST_FROM] != self.sys_homeassistant
@@ -1,6 +1,6 @@
 """Inits file for supervisor mounts REST API."""

-from typing import Any
+from typing import Any, cast

 from aiohttp import web
 import voluptuous as vol
@@ -10,7 +10,7 @@ from ..coresys import CoreSysAttributes
 from ..exceptions import APIError, APINotFound
 from ..mounts.const import ATTR_DEFAULT_BACKUP_MOUNT, MountUsage
 from ..mounts.mount import Mount
-from ..mounts.validate import SCHEMA_MOUNT_CONFIG
+from ..mounts.validate import SCHEMA_MOUNT_CONFIG, MountData
 from .const import ATTR_MOUNTS, ATTR_USER_PATH
 from .utils import api_process, api_validate

@@ -26,7 +26,7 @@ class APIMounts(CoreSysAttributes):

     def _extract_mount(self, request: web.Request) -> Mount:
         """Extract mount from request or raise."""
-        name = request.match_info.get("mount")
+        name = request.match_info["mount"]
         if name not in self.sys_mounts:
             raise APINotFound(f"No mount exists with name {name}")
         return self.sys_mounts.get(name)
@@ -66,15 +66,15 @@ class APIMounts(CoreSysAttributes):
         else:
             self.sys_mounts.default_backup_mount = mount

-        self.sys_mounts.save_data()
+        await self.sys_mounts.save_data()

     @api_process
     async def create_mount(self, request: web.Request) -> None:
         """Create a new mount in supervisor."""
-        body = await api_validate(SCHEMA_MOUNT_CONFIG, request)
+        body = cast(MountData, await api_validate(SCHEMA_MOUNT_CONFIG, request))

-        if body[ATTR_NAME] in self.sys_mounts:
-            raise APIError(f"A mount already exists with name {body[ATTR_NAME]}")
+        if body["name"] in self.sys_mounts:
+            raise APIError(f"A mount already exists with name {body['name']}")

         mount = Mount.from_dict(self.coresys, body)
         await self.sys_mounts.create_mount(mount)
@@ -87,7 +87,7 @@ class APIMounts(CoreSysAttributes):
         if not self.sys_mounts.default_backup_mount:
             self.sys_mounts.default_backup_mount = mount

-        self.sys_mounts.save_data()
+        await self.sys_mounts.save_data()

     @api_process
     async def update_mount(self, request: web.Request) -> None:
@@ -97,7 +97,10 @@ class APIMounts(CoreSysAttributes):
             {vol.Optional(ATTR_NAME, default=current.name): current.name},
             extra=vol.ALLOW_EXTRA,
         )
-        body = await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request)
+        body = cast(
+            MountData,
+            await api_validate(vol.All(name_schema, SCHEMA_MOUNT_CONFIG), request),
+        )

         mount = Mount.from_dict(self.coresys, body)
         await self.sys_mounts.create_mount(mount)
@@ -110,7 +113,7 @@ class APIMounts(CoreSysAttributes):
         elif self.sys_mounts.default_backup_mount == mount:
             self.sys_mounts.default_backup_mount = None

-        self.sys_mounts.save_data()
+        await self.sys_mounts.save_data()

     @api_process
     async def delete_mount(self, request: web.Request) -> None:
@@ -122,7 +125,7 @@ class APIMounts(CoreSysAttributes):
         if mount.usage == MountUsage.BACKUP:
             self.sys_create_task(self.sys_backups.reload())

-        self.sys_mounts.save_data()
+        await self.sys_mounts.save_data()

     @api_process
     async def reload_mount(self, request: web.Request) -> None:
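The cast(MountData, ...) above is purely for type checkers. A minimal sketch of the idea, with a simplified stand-in for supervisor.mounts.validate.MountData and a pretend validator:

from typing import TypedDict, cast


class MountData(TypedDict):
    """Simplified stand-in for the real MountData TypedDict."""

    name: str
    type: str


def api_validate(raw: dict) -> dict:
    """Pretend validator; the real one runs a voluptuous schema."""
    return dict(raw)


# cast() is a no-op at runtime; it only tells the type checker the dict's shape.
body = cast(MountData, api_validate({"name": "media", "type": "cifs"}))
print(body["name"])  # type checkers now know "name" exists and is a str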
@@ -10,6 +10,7 @@ import voluptuous as vol

 from ..const import (
     ATTR_ACCESSPOINTS,
+    ATTR_ADDR_GEN_MODE,
     ATTR_ADDRESS,
     ATTR_AUTH,
     ATTR_CONNECTED,
@@ -22,9 +23,12 @@ from ..const import (
     ATTR_ID,
     ATTR_INTERFACE,
     ATTR_INTERFACES,
+    ATTR_IP6_PRIVACY,
     ATTR_IPV4,
     ATTR_IPV6,
+    ATTR_LLMNR,
     ATTR_MAC,
+    ATTR_MDNS,
     ATTR_METHOD,
     ATTR_MODE,
     ATTR_NAMESERVERS,
@@ -38,17 +42,21 @@ from ..const import (
     ATTR_TYPE,
     ATTR_VLAN,
     ATTR_WIFI,
+    DOCKER_IPV4_NETWORK_MASK,
     DOCKER_NETWORK,
-    DOCKER_NETWORK_MASK,
 )
 from ..coresys import CoreSysAttributes
 from ..exceptions import APIError, APINotFound, HostNetworkNotFound
 from ..host.configuration import (
     AccessPoint,
     Interface,
+    InterfaceAddrGenMode,
+    InterfaceIp6Privacy,
     InterfaceMethod,
+    Ip6Setting,
     IpConfig,
     IpSetting,
+    MulticastDnsMode,
     VlanConfig,
     WifiConfig,
 )
@@ -68,6 +76,8 @@ _SCHEMA_IPV6_CONFIG = vol.Schema(
     {
         vol.Optional(ATTR_ADDRESS): [vol.Coerce(IPv6Interface)],
         vol.Optional(ATTR_METHOD): vol.Coerce(InterfaceMethod),
+        vol.Optional(ATTR_ADDR_GEN_MODE): vol.Coerce(InterfaceAddrGenMode),
+        vol.Optional(ATTR_IP6_PRIVACY): vol.Coerce(InterfaceIp6Privacy),
         vol.Optional(ATTR_GATEWAY): vol.Coerce(IPv6Address),
         vol.Optional(ATTR_NAMESERVERS): [vol.Coerce(IPv6Address)],
     }
@@ -90,12 +100,14 @@ SCHEMA_UPDATE = vol.Schema(
         vol.Optional(ATTR_IPV6): _SCHEMA_IPV6_CONFIG,
         vol.Optional(ATTR_WIFI): _SCHEMA_WIFI_CONFIG,
         vol.Optional(ATTR_ENABLED): vol.Boolean(),
+        vol.Optional(ATTR_MDNS): vol.Coerce(MulticastDnsMode),
+        vol.Optional(ATTR_LLMNR): vol.Coerce(MulticastDnsMode),
     }
 )


-def ipconfig_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]:
-    """Return a dict with information about ip configuration."""
+def ip4config_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]:
+    """Return a dict with information about IPv4 configuration."""
     return {
         ATTR_METHOD: setting.method,
         ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
@@ -105,6 +117,19 @@ def ip4config_struct(config: IpConfig, setting: IpSetting) -> dict[str, Any]:
     }


+def ip6config_struct(config: IpConfig, setting: Ip6Setting) -> dict[str, Any]:
+    """Return a dict with information about IPv6 configuration."""
+    return {
+        ATTR_METHOD: setting.method,
+        ATTR_ADDR_GEN_MODE: setting.addr_gen_mode,
+        ATTR_IP6_PRIVACY: setting.ip6_privacy,
+        ATTR_ADDRESS: [address.with_prefixlen for address in config.address],
+        ATTR_NAMESERVERS: [str(address) for address in config.nameservers],
+        ATTR_GATEWAY: str(config.gateway) if config.gateway else None,
+        ATTR_READY: config.ready,
+    }
+
+
 def wifi_struct(config: WifiConfig) -> dict[str, Any]:
     """Return a dict with information about wifi configuration."""
     return {
@@ -132,10 +157,16 @@ def interface_struct(interface: Interface) -> dict[str, Any]:
         ATTR_CONNECTED: interface.connected,
         ATTR_PRIMARY: interface.primary,
         ATTR_MAC: interface.mac,
-        ATTR_IPV4: ipconfig_struct(interface.ipv4, interface.ipv4setting),
-        ATTR_IPV6: ipconfig_struct(interface.ipv6, interface.ipv6setting),
+        ATTR_IPV4: ip4config_struct(interface.ipv4, interface.ipv4setting)
+        if interface.ipv4 and interface.ipv4setting
+        else None,
+        ATTR_IPV6: ip6config_struct(interface.ipv6, interface.ipv6setting)
+        if interface.ipv6 and interface.ipv6setting
+        else None,
         ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
         ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
+        ATTR_MDNS: interface.mdns,
+        ATTR_LLMNR: interface.llmnr,
     }

@@ -179,7 +210,7 @@ class APINetwork(CoreSysAttributes):
             ],
             ATTR_DOCKER: {
                 ATTR_INTERFACE: DOCKER_NETWORK,
-                ATTR_ADDRESS: str(DOCKER_NETWORK_MASK),
+                ATTR_ADDRESS: str(DOCKER_IPV4_NETWORK_MASK),
                 ATTR_GATEWAY: str(self.sys_docker.network.gateway),
                 ATTR_DNS: str(self.sys_docker.network.dns),
             },
@@ -190,14 +221,14 @@ class APINetwork(CoreSysAttributes):
     @api_process
     async def interface_info(self, request: web.Request) -> dict[str, Any]:
         """Return network information for a interface."""
-        interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
+        interface = self._get_interface(request.match_info[ATTR_INTERFACE])

         return interface_struct(interface)

     @api_process
     async def interface_update(self, request: web.Request) -> None:
         """Update the configuration of an interface."""
-        interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
+        interface = self._get_interface(request.match_info[ATTR_INTERFACE])

         # Validate data
         body = await api_validate(SCHEMA_UPDATE, request)
@@ -208,28 +239,38 @@ class APINetwork(CoreSysAttributes):
         for key, config in body.items():
             if key == ATTR_IPV4:
                 interface.ipv4setting = IpSetting(
-                    config.get(ATTR_METHOD, InterfaceMethod.STATIC),
-                    config.get(ATTR_ADDRESS, []),
-                    config.get(ATTR_GATEWAY),
-                    config.get(ATTR_NAMESERVERS, []),
+                    method=config.get(ATTR_METHOD, InterfaceMethod.STATIC),
+                    address=config.get(ATTR_ADDRESS, []),
+                    gateway=config.get(ATTR_GATEWAY),
+                    nameservers=config.get(ATTR_NAMESERVERS, []),
                 )
             elif key == ATTR_IPV6:
-                interface.ipv6setting = IpSetting(
-                    config.get(ATTR_METHOD, InterfaceMethod.STATIC),
-                    config.get(ATTR_ADDRESS, []),
-                    config.get(ATTR_GATEWAY),
-                    config.get(ATTR_NAMESERVERS, []),
+                interface.ipv6setting = Ip6Setting(
+                    method=config.get(ATTR_METHOD, InterfaceMethod.STATIC),
+                    addr_gen_mode=config.get(
+                        ATTR_ADDR_GEN_MODE, InterfaceAddrGenMode.DEFAULT
+                    ),
+                    ip6_privacy=config.get(
+                        ATTR_IP6_PRIVACY, InterfaceIp6Privacy.DEFAULT
+                    ),
+                    address=config.get(ATTR_ADDRESS, []),
+                    gateway=config.get(ATTR_GATEWAY),
+                    nameservers=config.get(ATTR_NAMESERVERS, []),
                 )
             elif key == ATTR_WIFI:
                 interface.wifi = WifiConfig(
-                    config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE),
-                    config.get(ATTR_SSID, ""),
-                    config.get(ATTR_AUTH, AuthMethod.OPEN),
-                    config.get(ATTR_PSK, None),
-                    None,
+                    mode=config.get(ATTR_MODE, WifiMode.INFRASTRUCTURE),
+                    ssid=config.get(ATTR_SSID, ""),
+                    auth=config.get(ATTR_AUTH, AuthMethod.OPEN),
+                    psk=config.get(ATTR_PSK, None),
+                    signal=None,
                 )
             elif key == ATTR_ENABLED:
                 interface.enabled = config
+            elif key == ATTR_MDNS:
+                interface.mdns = config
+            elif key == ATTR_LLMNR:
+                interface.llmnr = config

         await asyncio.shield(self.sys_host.network.apply_changes(interface))
@@ -243,7 +284,7 @@ class APINetwork(CoreSysAttributes):
     @api_process
     async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]:
         """Scan and return a list of available networks."""
-        interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
+        interface = self._get_interface(request.match_info[ATTR_INTERFACE])

         # Only wlan is supported
         if interface.type != InterfaceType.WIRELESS:
@@ -256,8 +297,10 @@ class APINetwork(CoreSysAttributes):
     @api_process
     async def create_vlan(self, request: web.Request) -> None:
         """Create a new vlan."""
-        interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
-        vlan = int(request.match_info.get(ATTR_VLAN))
+        interface = self._get_interface(request.match_info[ATTR_INTERFACE])
+        vlan = int(request.match_info.get(ATTR_VLAN, -1))
+        if vlan < 0:
+            raise APIError(f"Invalid vlan specified: {vlan}")

         # Only ethernet is supported
         if interface.type != InterfaceType.ETHERNET:
@@ -268,26 +311,41 @@ class APINetwork(CoreSysAttributes):

         vlan_config = VlanConfig(vlan, interface.name)

+        mdns_mode = MulticastDnsMode.DEFAULT
+        llmnr_mode = MulticastDnsMode.DEFAULT
+
+        if ATTR_MDNS in body:
+            mdns_mode = body[ATTR_MDNS]
+
+        if ATTR_LLMNR in body:
+            llmnr_mode = body[ATTR_LLMNR]
+
         ipv4_setting = None
         if ATTR_IPV4 in body:
             ipv4_setting = IpSetting(
-                body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
-                body[ATTR_IPV4].get(ATTR_ADDRESS, []),
-                body[ATTR_IPV4].get(ATTR_GATEWAY, None),
-                body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
+                method=body[ATTR_IPV4].get(ATTR_METHOD, InterfaceMethod.AUTO),
+                address=body[ATTR_IPV4].get(ATTR_ADDRESS, []),
+                gateway=body[ATTR_IPV4].get(ATTR_GATEWAY, None),
+                nameservers=body[ATTR_IPV4].get(ATTR_NAMESERVERS, []),
             )

         ipv6_setting = None
         if ATTR_IPV6 in body:
-            ipv6_setting = IpSetting(
-                body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
-                body[ATTR_IPV6].get(ATTR_ADDRESS, []),
-                body[ATTR_IPV6].get(ATTR_GATEWAY, None),
-                body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
+            ipv6_setting = Ip6Setting(
+                method=body[ATTR_IPV6].get(ATTR_METHOD, InterfaceMethod.AUTO),
+                addr_gen_mode=body[ATTR_IPV6].get(
+                    ATTR_ADDR_GEN_MODE, InterfaceAddrGenMode.DEFAULT
+                ),
+                ip6_privacy=body[ATTR_IPV6].get(
+                    ATTR_IP6_PRIVACY, InterfaceIp6Privacy.DEFAULT
+                ),
+                address=body[ATTR_IPV6].get(ATTR_ADDRESS, []),
+                gateway=body[ATTR_IPV6].get(ATTR_GATEWAY, None),
+                nameservers=body[ATTR_IPV6].get(ATTR_NAMESERVERS, []),
            )

         vlan_interface = Interface(
             "",
             f"{interface.name}.{vlan}",
             "",
             "",
             True,
@@ -300,5 +358,7 @@ class APINetwork(CoreSysAttributes):
             ipv6_setting,
             None,
             vlan_config,
+            mdns=mdns_mode,
+            llmnr=llmnr_mode,
         )
-        await asyncio.shield(self.sys_host.network.apply_changes(vlan_interface))
+        await asyncio.shield(self.sys_host.network.create_vlan(vlan_interface))
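The switch to keyword arguments above is what lets Ip6Setting grow fields (addr_gen_mode, ip6_privacy) without breaking call sites that relied on positional order. A sketch with a simplified stand-in for the host-configuration setting class:

from dataclasses import dataclass
from ipaddress import IPv6Address, IPv6Interface


@dataclass(slots=True)
class Ip6Setting:
    """Simplified stand-in, not the real supervisor.host.configuration class."""

    method: str = "auto"
    addr_gen_mode: str = "default"
    ip6_privacy: str = "default"
    address: list[IPv6Interface] | None = None
    gateway: IPv6Address | None = None
    nameservers: list[IPv6Address] | None = None


# Keyword construction stays valid even as fields are inserted mid-signature.
setting = Ip6Setting(
    method="static",
    address=[IPv6Interface("2001:db8::2/64")],
    gateway=IPv6Address("2001:db8::1"),
    nameservers=[IPv6Address("2001:4860:4860::8888")],
)
print(setting)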
@@ -3,6 +3,7 @@
 import asyncio
 from collections.abc import Awaitable
 import logging
+import re
 from typing import Any

 from aiohttp import web
@@ -21,12 +22,14 @@ from ..const import (
     ATTR_SERIAL,
     ATTR_SIZE,
     ATTR_STATE,
+    ATTR_SWAP_SIZE,
+    ATTR_SWAPPINESS,
     ATTR_UPDATE_AVAILABLE,
     ATTR_VERSION,
     ATTR_VERSION_LATEST,
 )
 from ..coresys import CoreSysAttributes
-from ..exceptions import BoardInvalidError
+from ..exceptions import APINotFound, BoardInvalidError
 from ..resolution.const import ContextType, IssueType, SuggestionType
 from ..validate import version_tag
 from .const import (
@@ -65,6 +68,15 @@ SCHEMA_GREEN_OPTIONS = vol.Schema(
         vol.Optional(ATTR_SYSTEM_HEALTH_LED): vol.Boolean(),
     }
 )

+RE_SWAP_SIZE = re.compile(r"^\d+([KMG](i?B)?|B)?$", re.IGNORECASE)
+
+SCHEMA_SWAP_OPTIONS = vol.Schema(
+    {
+        vol.Optional(ATTR_SWAP_SIZE): vol.Match(RE_SWAP_SIZE),
+        vol.Optional(ATTR_SWAPPINESS): vol.All(int, vol.Range(min=0, max=200)),
+    }
+)
 # pylint: enable=no-value-for-parameter

@@ -169,7 +181,7 @@ class APIOS(CoreSysAttributes):
                 body[ATTR_SYSTEM_HEALTH_LED]
             )

-        self.sys_dbus.agent.board.green.save_data()
+        await self.sys_dbus.agent.board.green.save_data()

     @api_process
     async def boards_yellow_info(self, request: web.Request) -> dict[str, Any]:
@@ -196,7 +208,7 @@ class APIOS(CoreSysAttributes):
         if ATTR_POWER_LED in body:
             await self.sys_dbus.agent.board.yellow.set_power_led(body[ATTR_POWER_LED])

-        self.sys_dbus.agent.board.yellow.save_data()
+        await self.sys_dbus.agent.board.yellow.save_data()
         self.sys_resolution.create_issue(
             IssueType.REBOOT_REQUIRED,
             ContextType.SYSTEM,
@@ -212,3 +224,53 @@ class APIOS(CoreSysAttributes):
         )

         return {}
+
+    @api_process
+    async def config_swap_info(self, request: web.Request) -> dict[str, Any]:
+        """Get swap settings."""
+        if (
+            not self.coresys.os.available
+            or not self.coresys.os.version
+            or self.coresys.os.version < "15.0"
+        ):
+            raise APINotFound(
+                "Home Assistant OS 15.0 or newer required for swap settings"
+            )
+
+        return {
+            ATTR_SWAP_SIZE: self.sys_dbus.agent.swap.swap_size,
+            ATTR_SWAPPINESS: self.sys_dbus.agent.swap.swappiness,
+        }
+
+    @api_process
+    async def config_swap_options(self, request: web.Request) -> None:
+        """Update swap settings."""
+        if (
+            not self.coresys.os.available
+            or not self.coresys.os.version
+            or self.coresys.os.version < "15.0"
+        ):
+            raise APINotFound(
+                "Home Assistant OS 15.0 or newer required for swap settings"
+            )
+
+        body = await api_validate(SCHEMA_SWAP_OPTIONS, request)
+
+        reboot_required = False
+
+        if ATTR_SWAP_SIZE in body:
+            old_size = self.sys_dbus.agent.swap.swap_size
+            await self.sys_dbus.agent.swap.set_swap_size(body[ATTR_SWAP_SIZE])
+            reboot_required = reboot_required or old_size != body[ATTR_SWAP_SIZE]
+
+        if ATTR_SWAPPINESS in body:
+            old_swappiness = self.sys_dbus.agent.swap.swappiness
+            await self.sys_dbus.agent.swap.set_swappiness(body[ATTR_SWAPPINESS])
+            reboot_required = reboot_required or old_swappiness != body[ATTR_SWAPPINESS]
+
+        if reboot_required:
+            self.sys_resolution.create_issue(
+                IssueType.REBOOT_REQUIRED,
+                ContextType.SYSTEM,
+                suggestions=[SuggestionType.EXECUTE_REBOOT],
+            )
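What the new RE_SWAP_SIZE pattern accepts can be checked directly: a positive integer with an optional K/M/G suffix, optionally in KiB/KB style, case insensitive. A quick demonstration using the same regex as the hunk above:

import re

RE_SWAP_SIZE = re.compile(r"^\d+([KMG](i?B)?|B)?$", re.IGNORECASE)

for candidate in ("4096", "512M", "1GiB", "2gb", "300KB", "1.5G", "G"):
    print(candidate, bool(RE_SWAP_SIZE.match(candidate)))
# "1.5G" and a bare "G" are rejected; the others are accepted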
@@ -1 +1 @@
!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(12[1-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(1{2}[5-9]|1[2-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(109|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(17\.([3-9]|\d{2,})|(1[89]|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(10[6-9]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(15[._]([6-9]|\d{2,})|(1[6-9]|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](12[1-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(12[1-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(12[1-9]|1[3-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[4-9]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[2-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.553a7707b827808b.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.ff48ee24e0742761.js")}else d("/api/hassio/app/frontend_es5/entrypoint.ff48ee24e0742761.js")}()
!function(){function d(d){var e=document.createElement("script");e.src=d,document.body.appendChild(e)}if(/Edge?\/(13\d|1[4-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Firefox\/(13[1-9]|1[4-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Chrom(ium|e)\/(10[5-9]|1[1-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|(Maci|X1{2}).+ Version\/(18\.([1-9]|\d{2,})|(19|[2-9]\d|\d{3,})\.\d+)([,.]\d+|)( \(\w+\)|)( Mobile\/\w+|) Safari\/|Chrome.+OPR\/(1{2}[5-9]|1[2-9]\d|[2-9]\d{2}|\d{4,})\.\d+\.\d+|(CPU[ +]OS|iPhone[ +]OS|CPU[ +]iPhone|CPU IPhone OS|CPU iPad OS)[ +]+(18[._]([1-9]|\d{2,})|(19|[2-9]\d|\d{3,})[._]\d+)([._]\d+|)|Android:?[ /-](13\d|1[4-9]\d|[2-9]\d{2}|\d{4,})(\.\d+|)(\.\d+|)|Mobile Safari.+OPR\/([89]\d|\d{3,})\.\d+\.\d+|Android.+Firefox\/(13[1-9]|1[4-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|Android.+Chrom(ium|e)\/(13\d|1[4-9]\d|[2-9]\d{2}|\d{4,})\.\d+(\.\d+|)|SamsungBrowser\/(2[89]|[3-9]\d|\d{3,})\.\d+|Home As{2}istant\/[\d.]+ \(.+; macOS (1[3-9]|[2-9]\d|\d{3,})\.\d+(\.\d+)?\)/.test(navigator.userAgent))try{new Function("import('/api/hassio/app/frontend_latest/entrypoint.1e251476306cafd4.js')")()}catch(e){d("/api/hassio/app/frontend_es5/entrypoint.601ff5d4dddd11f9.js")}else d("/api/hassio/app/frontend_es5/entrypoint.601ff5d4dddd11f9.js")}()
|
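This one-line change bumps the browser gate in the panel entrypoint: the regex tests navigator.userAgent, and only browsers above the listed minimum versions get the frontend_latest module bundle via dynamic import, while everything else (and any import failure) falls back to the frontend_es5 build. As a quick illustration, the Firefox alternation taken from the two lines above shows the minimum moving from 115 to 131:

    import re

    # Firefox sub-patterns extracted from the old and new entrypoint regexes above.
    OLD_FIREFOX = re.compile(r"Firefox/(1{2}[5-9]|1[2-9]\d|[2-9]\d{2}|\d{4,})\.\d+")
    NEW_FIREFOX = re.compile(r"Firefox/(13[1-9]|1[4-9]\d|[2-9]\d{2}|\d{4,})\.\d+")

    ua = "Mozilla/5.0 (X11; Linux x86_64; rv:120.0) Gecko/20100101 Firefox/120.0"
    print(bool(OLD_FIREFOX.search(ua)))  # True:  Firefox 120 previously got the modern bundle
    print(bool(NEW_FIREFOX.search(ua)))  # False: Firefox < 131 now gets the ES5 fallback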
BIN supervisor/api/panel/entrypoint.js.br Normal file
Binary file not shown.
2 supervisor/api/panel/frontend_es5/10.02c74d8ffd9bf568.js Normal file
File diff suppressed because one or more lines are too long
BIN supervisor/api/panel/frontend_es5/10.02c74d8ffd9bf568.js.br Normal file
Binary file not shown.
BIN supervisor/api/panel/frontend_es5/10.02c74d8ffd9bf568.js.gz Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN supervisor/api/panel/frontend_es5/1008.c2e44b88f5829db4.js.br Normal file
Binary file not shown.
BIN supervisor/api/panel/frontend_es5/1008.c2e44b88f5829db4.js.gz Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN supervisor/api/panel/frontend_es5/1057.d306824fd6aa0497.js.br Normal file
Binary file not shown.
BIN supervisor/api/panel/frontend_es5/1057.d306824fd6aa0497.js.gz Normal file
Binary file not shown.
@@ -0,0 +1 @@
+{"version":3,"file":"1057.d306824fd6aa0497.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20250925.1/src/data/auth.ts","https://raw.githubusercontent.com/home-assistant/frontend/20250925.1/src/data/entity.ts","https://raw.githubusercontent.com/home-assistant/frontend/20250925.1/src/data/media-player.ts","https://raw.githubusercontent.com/home-assistant/frontend/20250925.1/src/data/tts.ts","https://raw.githubusercontent.com/home-assistant/frontend/20250925.1/src/util/brands-url.ts"],"names":["autocompleteLoginFields","schema","map","field","type","name","Object","assign","autocomplete","autofocus","getSignedPath","hass","path","callWS","UNAVAILABLE","UNKNOWN","ON","OFF","UNAVAILABLE_STATES","OFF_STATES","isUnavailableState","arrayLiteralIncludes","MediaPlayerEntityFeature","BROWSER_PLAYER","MediaClassBrowserSettings","album","icon","layout","app","show_list_images","artist","mdiAccountMusic","channel","mdiTelevisionClassic","thumbnail_ratio","composer","contributing_artist","directory","episode","game","genre","image","movie","music","playlist","podcast","season","track","tv_show","url","video","browseMediaPlayer","entityId","mediaContentId","mediaContentType","entity_id","media_content_id","media_content_type","convertTextToSpeech","data","callApi","TTS_MEDIA_SOURCE_PREFIX","isTTSMediaSource","startsWith","getProviderFromTTSMediaSource","substring","listTTSEngines","language","country","getTTSEngine","engine_id","listTTSVoices","brandsUrl","options","brand","useFallback","domain","darkOptimized","extractDomainFromBrandUrl","split","isBrandUrl","thumbnail"],"mappings":"2QAyBO,MAEMA,EAA2BC,GACtCA,EAAOC,IAAKC,IACV,GAAmB,WAAfA,EAAMC,KAAmB,OAAOD,EACpC,OAAQA,EAAME,MACZ,IAAK,WACH,OAAAC,OAAAC,OAAAD,OAAAC,OAAA,GAAYJ,GAAK,IAAEK,aAAc,WAAYC,WAAW,IAC1D,IAAK,WACH,OAAAH,OAAAC,OAAAD,OAAAC,OAAA,GAAYJ,GAAK,IAAEK,aAAc,qBACnC,IAAK,OACH,OAAAF,OAAAC,OAAAD,OAAAC,OAAA,GAAYJ,GAAK,IAAEK,aAAc,gBAAiBC,WAAW,IAC/D,QACE,OAAON,KAIFO,EAAgBA,CAC3BC,EACAC,IACwBD,EAAKE,OAAO,CAAET,KAAM,iBAAkBQ,Q,gMC3CzD,MAAME,EAAc,cACdC,EAAU,UACVC,EAAK,KACLC,EAAM,MAENC,EAAqB,CAACJ,EAAaC,GACnCI,EAAa,CAACL,EAAaC,EAASE,GAEpCG,GAAqBC,EAAAA,EAAAA,GAAqBH,IAC7BG,EAAAA,EAAAA,GAAqBF,E,+gCCuExC,IAAWG,EAAA,SAAAA,G,qnBAAAA,C,CAAA,C,IAyBX,MAAMC,EAAiB,UAWjBC,EAGT,CACFC,MAAO,CAAEC,K,mQAAgBC,OAAQ,QACjCC,IAAK,CAAEF,K,6GAAsBC,OAAQ,OAAQE,kBAAkB,GAC/DC,OAAQ,CAAEJ,KAAMK,EAAiBJ,OAAQ,OAAQE,kBAAkB,GACnEG,QAAS,CACPN,KAAMO,EACNC,gBAAiB,WACjBP,OAAQ,OACRE,kBAAkB,GAEpBM,SAAU,CACRT,K,4cACAC,OAAQ,OACRE,kBAAkB,GAEpBO,oBAAqB,CACnBV,KAAMK,EACNJ,OAAQ,OACRE,kBAAkB,GAEpBQ,UAAW,CAAEX,K,gGAAiBC,OAAQ,OAAQE,kBAAkB,GAChES,QAAS,CACPZ,KAAMO,EACNN,OAAQ,OACRO,gBAAiB,WACjBL,kBAAkB,GAEpBU,KAAM,CACJb,K,qWACAC,OAAQ,OACRO,gBAAiB,YAEnBM,MAAO,CAAEd,K,4hCAAqBC,OAAQ,OAAQE,kBAAkB,GAChEY,MAAO,CAAEf,K,sHAAgBC,OAAQ,OAAQE,kBAAkB,GAC3Da,MAAO,CACLhB,K,6GACAQ,gBAAiB,WACjBP,OAAQ,OACRE,kBAAkB,GAEpBc,MAAO,CAAEjB,K,+NAAgBG,kBAAkB,GAC3Ce,SAAU,CAAElB,K,mJAAwBC,OAAQ,OAAQE,kBAAkB,GACtEgB,QAAS,CAAEnB,K,qpBAAkBC,OAAQ,QACrCmB,OAAQ,CACNpB,KAAMO,EACNN,OAAQ,OACRO,gBAAiB,WACjBL,kBAAkB,GAEpBkB,MAAO,CAAErB,K,mLACTsB,QAAS,CACPtB,KAAMO,EACNN,OAAQ,OACRO,gBAAiB,YAEnBe,IAAK,CAAEvB,K,w5BACPwB,MAAO,CAAExB,K,2GAAgBC,OAAQ,OAAQE,kBAAkB,IAkChDsB,EAAoBA,CAC/BxC,EACAyC,EACAC,EACAC,IAEA3C,EAAKE,OAAwB,CAC3BT,KAAM,4BACNmD,UAAWH,EACXI,iBAAkBH,EAClBI,mBAAoBH,G,yLC/MjB,MAAMI,EAAsBA,CACjC/C,EACAgD,IAOGhD,EAAKiD,QAAuC,OAAQ,cAAeD,GAElEE,EAA0B,sBAEnBC,EAAoBT,GAC/BA,EAAeU,WAAWF,GAEfG,EAAiCX,GAC5CA,EAAeY,UAAUJ,IAEdK,EAAiBA,CAC5BvD,EACAwD,EACAC,IAEAzD,EAAKE,OAAO,CACVT,KAAM,kBACN+D,WACAC,YAGSC,EAAeA,CAC1B1D,EACA2D,IAEA3D,EAAKE,OAAO,CACVT,KAAM,iBACNkE,cAGSC,EAAgBA,CAC3B5D,EACA2D,EACAH,IAEAxD,EAAKE,OAAO,CACVT,KAAM,oBACNkE,YACAH,Y,kHC9CG,MAAMK,EAAaC,GACxB,oCAAoCA,EAAQC,MAAQ,UAAY,KAC9DD,EAAQE,YAAc,KAAO,KAC5BF,EAAQG,UAAUH,EAAQI,cAAgB,QAAU,KACrDJ,EAAQrE,WAQC0E,EAA6B7B,GAAgBA,EAAI8B,MAAM,KAAK,GAE5DC,EAAcC,GACzBA,EAAUlB,WAAW,oC"}
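The added .map file is a standard version-3 source map: "sources" and "names" list the original frontend files and identifiers, while "mappings" packs the position data as semicolon/comma-separated base64-VLQ segments. A minimal decoder for one such segment, assuming nothing beyond the published v3 format (the helper name is mine):

    import string

    B64 = string.ascii_uppercase + string.ascii_lowercase + string.digits + "+/"

    def vlq_decode(segment):
        """Decode one base64-VLQ segment from a source map 'mappings' string."""
        values, shift, value = [], 0, 0
        for ch in segment:
            digit = B64.index(ch)
            value |= (digit & 0b11111) << shift
            if digit & 0b100000:  # continuation bit: more 5-bit chunks follow
                shift += 5
            else:  # last chunk: bit 0 of the assembled value is the sign
                values.append(-(value >> 1) if value & 1 else value >> 1)
                shift, value = 0, 0
        return values

    # First segment of the mappings above: generated-column/source/line/column deltas.
    print(vlq_decode("2QAyBO"))  # -> [267, 0, 25, 7]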
File diff suppressed because one or more lines are too long
BIN supervisor/api/panel/frontend_es5/1076.205340b2a7c5d559.js.br Normal file
Binary file not shown.
BIN supervisor/api/panel/frontend_es5/1076.205340b2a7c5d559.js.gz Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN supervisor/api/panel/frontend_es5/1180.89c3426e7a24fa5c.js.br Normal file
Binary file not shown.
BIN supervisor/api/panel/frontend_es5/1180.89c3426e7a24fa5c.js.gz Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
-"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["12"],{5739:function(e,a,t){t.a(e,(async function(e,i){try{t.r(a),t.d(a,{HaNavigationSelector:function(){return b}});var n,r=t(63038),d=t(27862),o=t(52565),l=t(92776),u=t(5776),s=t(21475),c=(t(38419),t(57243)),h=t(50778),v=t(36522),k=t(63297),f=e([k]);k=(f.then?(await f)():f)[0];var b=(0,s.Z)([(0,h.Mo)("ha-selector-navigation")],(function(e,a){var t=function(a){function t(){var a;(0,o.Z)(this,t);for(var i=arguments.length,n=new Array(i),r=0;r<i;r++)n[r]=arguments[r];return a=(0,l.Z)(this,t,[].concat(n)),e(a),a}return(0,u.Z)(t,a),(0,d.Z)(t)}(a);return{F:t,d:[{kind:"field",decorators:[(0,h.Cb)({attribute:!1})],key:"hass",value:void 0},{kind:"field",decorators:[(0,h.Cb)({attribute:!1})],key:"selector",value:void 0},{kind:"field",decorators:[(0,h.Cb)()],key:"value",value:void 0},{kind:"field",decorators:[(0,h.Cb)()],key:"label",value:void 0},{kind:"field",decorators:[(0,h.Cb)()],key:"helper",value:void 0},{kind:"field",decorators:[(0,h.Cb)({type:Boolean,reflect:!0})],key:"disabled",value:function(){return!1}},{kind:"field",decorators:[(0,h.Cb)({type:Boolean})],key:"required",value:function(){return!0}},{kind:"method",key:"render",value:function(){return(0,c.dy)(n||(n=(0,r.Z)([' <ha-navigation-picker .hass="','" .label="','" .value="','" .required="','" .disabled="','" .helper="','" @value-changed="','"></ha-navigation-picker> '])),this.hass,this.label,this.value,this.required,this.disabled,this.helper,this._valueChanged)}},{kind:"method",key:"_valueChanged",value:function(e){(0,v.B)(this,"value-changed",{value:e.detail.value})}}]}}),c.oi);i()}catch(y){i(y)}}))}}]);
-//# sourceMappingURL=12.cd76ff0e6ff4d214.js.map
Binary file not shown.
@@ -1 +0,0 @@
-{"version":3,"file":"12.cd76ff0e6ff4d214.js","sources":["https://raw.githubusercontent.com/home-assistant/frontend/20241127.8/src/components/ha-selector/ha-selector-navigation.ts"],"names":["HaNavigationSelector","_decorate","customElement","_initialize","_LitElement","_LitElement2","_this","_classCallCheck","_len","arguments","length","args","Array","_key","_callSuper","concat","_inherits","_createClass","F","d","kind","decorators","property","attribute","key","value","type","Boolean","reflect","html","_templateObject","_taggedTemplateLiteral","this","hass","label","required","disabled","helper","_valueChanged","ev","fireEvent","detail","LitElement"],"mappings":"oYAOA,IACaA,GAAoBC,EAAAA,EAAAA,GAAA,EADhCC,EAAAA,EAAAA,IAAc,4BAAyB,SAAAC,EAAAC,GAAA,IAC3BJ,EAAoB,SAAAK,GAAA,SAAAL,IAAA,IAAAM,GAAAC,EAAAA,EAAAA,GAAA,KAAAP,GAAA,QAAAQ,EAAAC,UAAAC,OAAAC,EAAA,IAAAC,MAAAJ,GAAAK,EAAA,EAAAA,EAAAL,EAAAK,IAAAF,EAAAE,GAAAJ,UAAAI,GAAA,OAAAP,GAAAQ,EAAAA,EAAAA,GAAA,KAAAd,EAAA,GAAAe,OAAAJ,IAAAR,EAAAG,GAAAA,CAAA,QAAAU,EAAAA,EAAAA,GAAAhB,EAAAK,IAAAY,EAAAA,EAAAA,GAAAjB,EAAA,EAAAI,GAAA,OAAAc,EAApBlB,EAAoBmB,EAAA,EAAAC,KAAA,QAAAC,WAAA,EAC9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,OAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,IAAS,CAAEC,WAAW,KAAQC,IAAA,WAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAE9BC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,QAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,OAAUE,IAAA,SAAAC,WAAA,IAAAL,KAAA,QAAAC,WAAA,EAEVC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,QAASC,SAAS,KAAOJ,IAAA,WAAAC,MAAA,kBAAmB,CAAK,IAAAL,KAAA,QAAAC,WAAA,EAElEC,EAAAA,EAAAA,IAAS,CAAEI,KAAMC,WAAUH,IAAA,WAAAC,MAAA,kBAAmB,CAAI,IAAAL,KAAA,SAAAI,IAAA,SAAAC,MAEnD,WACE,OAAOI,EAAAA,EAAAA,IAAIC,IAAAA,GAAAC,EAAAA,EAAAA,GAAA,+JAECC,KAAKC,KACJD,KAAKE,MACLF,KAAKP,MACFO,KAAKG,SACLH,KAAKI,SACPJ,KAAKK,OACEL,KAAKM,cAG5B,GAAC,CAAAlB,KAAA,SAAAI,IAAA,gBAAAC,MAED,SAAsBc,IACpBC,EAAAA,EAAAA,GAAUR,KAAM,gBAAiB,CAAEP,MAAOc,EAAGE,OAAOhB,OACtD,IAAC,GA/BuCiB,EAAAA,I"}
File diff suppressed because one or more lines are too long
BIN supervisor/api/panel/frontend_es5/120.c5f670671b56cb1c.js.br Normal file
Binary file not shown.
BIN supervisor/api/panel/frontend_es5/120.c5f670671b56cb1c.js.gz Normal file
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
-(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1236"],{4121:function(e,n,t){t(451),t(23509),Intl.PluralRules&&"function"==typeof Intl.PluralRules.__addLocaleData&&Intl.PluralRules.__addLocaleData({data:{categories:{cardinal:["one","other"],ordinal:["one","two","few","other"]},fn:function(e,n){var t=String(e).split("."),a=!t[1],l=Number(t[0])==e,o=l&&t[0].slice(-1),r=l&&t[0].slice(-2);return n?1==o&&11!=r?"one":2==o&&12!=r?"two":3==o&&13!=r?"few":"other":1==e&&a?"one":"other"}},locale:"en"})}}]);
-//# sourceMappingURL=1236.71e83acee5b952f8.js.map
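The deleted chunk above is the @formatjs Intl.PluralRules locale data for English; its embedded fn selects the cardinal or ordinal plural category from a number's last one or two digits. A Python transcription of that logic, with function and variable names of my choosing:

    def en_plural_category(n, ordinal=False):
        """English plural selection, transcribed from the deleted locale-data fn."""
        # Pass ints, or strings like "1.5", to mirror how JS stringifies numbers.
        parts = str(n).split(".")
        no_fraction = len(parts) == 1
        is_int = float(parts[0]) == float(n)
        n10 = parts[0][-1:] if is_int else ""   # last digit
        n100 = parts[0][-2:] if is_int else ""  # last two digits
        if ordinal:
            if n10 == "1" and n100 != "11":
                return "one"   # 1st, 21st, 31st, ...
            if n10 == "2" and n100 != "12":
                return "two"   # 2nd, 22nd, ...
            if n10 == "3" and n100 != "13":
                return "few"   # 3rd, 23rd, ...
            return "other"     # 4th, 11th, 12th, 13th, ...
        return "one" if float(n) == 1 and no_fraction else "other"

    print(en_plural_category(1), en_plural_category(22, ordinal=True))  # one two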
Binary file not shown.
@@ -1 +0,0 @@
-{"version":3,"file":"1236.71e83acee5b952f8.js","sources":["/unknown/node_modules/@formatjs/intl-pluralrules/locale-data/en.js"],"names":["Intl","PluralRules","__addLocaleData","n","ord","s","String","split","v0","t0","Number","n10","slice","n100"],"mappings":"6IAEIA,KAAKC,aAA2D,mBAArCD,KAAKC,YAAYC,iBAC9CF,KAAKC,YAAYC,gBAAgB,CAAC,KAAO,CAAC,WAAa,CAAC,SAAW,CAAC,MAAM,SAAS,QAAU,CAAC,MAAM,MAAM,MAAM,UAAU,GAAK,SAASC,EAAGC,GAC3I,IAAIC,EAAIC,OAAOH,GAAGI,MAAM,KAAMC,GAAMH,EAAE,GAAII,EAAKC,OAAOL,EAAE,KAAOF,EAAGQ,EAAMF,GAAMJ,EAAE,GAAGO,OAAO,GAAIC,EAAOJ,GAAMJ,EAAE,GAAGO,OAAO,GACvH,OAAIR,EAAmB,GAAPO,GAAoB,IAARE,EAAa,MAC9B,GAAPF,GAAoB,IAARE,EAAa,MAClB,GAAPF,GAAoB,IAARE,EAAa,MACzB,QACQ,GAALV,GAAUK,EAAK,MAAQ,OAChC,GAAG,OAAS,M"}
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1 +0,0 @@
-"use strict";(self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[]).push([["1295"],{21393:function(s,n,e){e.r(n)}}]);
Binary file not shown.
File diff suppressed because one or more lines are too long
BIN supervisor/api/panel/frontend_es5/131.566df1af9c07775a.js.br Normal file
Binary file not shown.
BIN supervisor/api/panel/frontend_es5/131.566df1af9c07775a.js.gz Normal file
Binary file not shown.
Some files were not shown because too many files have changed in this diff.