mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-14 11:39:21 +00:00)

Compare commits: 2020.12.1...refresh-up (656 commits)
Commits in this range:

ec897081cd 839361133a b651d63758 400d3981a2 69c2517d52 c8b49aba42 8071b107e7 603d19b075
a5ce2ef7cb f392dc5492 0c63883269 612d4f950b 1799c765b4 809ac1ffca fefc99e825 d994170a9d
d8c934365a e0fd31c390 22238c9c0e 5ff96cfa5e e22a19df1a f57bc0db25 6ba6b5ea56 5dc9f9235e
323fa2e637 0986419b2f f0bc952269 9266997482 75d252e21a 368e94f95f 3fbecf89db 54e6ae5fd9
5b96074055 5503f93a75 eadc629cd9 cde45e2e7a 050851a9ac 86bd16b2ba ce9181b05f f7ba364076
3511c19726 d9ed58696b 373f452774 e54efa681f 79cd8ac390 dc24f332f8 99cdf7b028 54edfa53bc
571c9a05c6 864b7bf023 e303431d74 19dd40275c 4cf970e37a 7947c27089 d0e2c8b694 19e3a859b0
e6557ded34 f4aae4522d 2066aefd6d 2f56cab953 883399f583 47f53501e5 b23a89e6fb 7764decc37
88490140af 61d56dce9c 838af87ad7 8f263ab345 6b76086652 efa5205800 a0c8b77737 9ee0efe6c0
a2af63d050 da246dc40a 3c52f87cdc d80d76a24d 8653f7a0e1 8458d9e0f6 5d4ce94155 828cf773cc
a902b55df7 f38cde4c68 4c9cbb112e 3d814f3c44 f269f72082 f07193dc3c d2b706df05 e5817e9445
85313f26ea f864613ffb 36ea8b2bb4 df9d62f874 4a6aaa8559 435f479984 e2f39059c6 531073d5ec
ef5b6a5f4c 03f0a136ab 7a6663ba80 9dd5eee1ae bb474a5c14 6ab4dda5e8 8a553dbb59 1ee6c0491c
cc50a91a42 637377f81d a90f70e017 949ecb255d 15f62837c8 e5246a5b1d 394d66290d 79d541185f
b433d129ef 4b0278fee8 8c59e6d05a 5c66278a1c 7abe9487a0 73832dd6d6 6cc3df54e9 c07c7c5146
a6d1078fe3 eba6da485d de880e24ed f344df9e5c 5af62a8834 800fb683f8 ad2566d58a 6c679b07e1
aa4f4c8d47 b83da5d89f 0afff9a9e2 0433d72ae6 d33beb06cd 279d6ccd79 af628293f3 df6b815175
d6127832a7 8240623806 2b4527fa64 23143aede4 8b93f0aee7 5cc4a9a929 288d2e5bdb 73d84113ea
4b15945ca1 10720b2988 bb991b69bb 7c9f6067c0 e960a70217 9b0a2e6da9 cd0c151bd9 b03c8c24dd
4416b6524e c9d3f65cc8 0407122fbe 5e871d9399 6df7a88666 5933b66b1c a85e816cd7 96f6c07912
40bcee38f3 6d2a38c96e dafc2cfec2 04f36e92e1 4f97013df4 53eae96a98 74530baeb7 271e4f0cc4
f4c7f2cae1 24cdb4787a 57b1c21af4 f0eddb6926 7c74c1bd8c 2d4a85ae43 d48c439737 874c50d3e8
4beaf571c2 58a948447e 32af7ef28b 208fb549b7 ab704c11cf 966b962ccf 4ea3695982 b2abe37d72
9bf8d15b01 70acbffc23 c9b1eb751e ad8d850ed7 1b0eb9397d 8572f8c4e5 66565dde87 d54c23952f
62b364ea29 e6f00144f2 8894984c12 49fbdedf6b 0899c16895 0747a7e4b2 0123d7935d 7a1009446b
034606cd0f ddc30cfd7d f10fccaff8 31001280c8 9638775944 fbec0befde 81e7fac848 97599b3e70
c94b23a3fd 9758980ae0 6ab3fbaab3 4933ff83df 71e12ecb2b 36687530e0 e7b5864c03 9497f85db9
419f603571 f4f1fc524d 6d9f44a900 aeb9b26d44 631f78f468 13cedb308e 82c183e1a8 25cf1e7394
91509a4205 d93ebd15a2 85e7f817e6 772cadb435 853aeef583 cd07bde307 3057df3181 fe785622ec
2b6829a786 7c6c982414 07eeb2eaf2 223f5b7bb1 8a9657c452 564e9811d0 b944b52b21 24aecdddf3
7bbfb60039 ece40008c7 0177b38ded 16f2f63081 5f376c2a27 90a6f109ee e6fd0ef5dc d46ab56901
3b1ad5c0cd de8a241e72 a4a0b43d91 adf355e54f 4f9e646b4c ce57d384ca d53d526673 cd8fc16bcb
6b58970354 b70ed9a60d 22c8ff1314 ba2cf8078e ef138b619b 9252af5ddb bcef34012d 2f18c177ae
12487fb69d e629bab8ee e85d7c3d2e 64c59d0fe9 522cbe3295 fd185fc326 6ddc135266 f8d5279d9c
2acae9af57 b425d21d05 4c7ba20a58 a4f325dd2e a99bfa2926 bb127a614b 6f2f005897 e22a20c165
20c2121e5f 8a5831d6b2 fb81946240 4bec86c58c 7034b79991 7b9a09dc4b 0746c4dec5 6dadb933bd
07197e6a50 6c79fb8325 7488750ee4 c9574254aa f466721ffa 3834cead07 75975de201 cb9f998ef1
eb9ce8ea1f a5ed68b641 1ef46424ea 53c99547d0 a34e7622d2 b234c18664 d8d594c728 1cd35841e8
d05b7edd87 95ef7d4508 9812e5be6a 183182943d a0189d65de b59f741162 efc2e826a1 a3ad23e262
5e3bcbfaac 7f3e4558b9 567a01c2ed 2236cf146e 8e2f33ba1e 8190883a71 c01218a97a 2437817a41
682ee4529e cee520f0b5 0d915a3efc f3a562006a d78091cc60 f785c4e909 cda66ba737 ea68ffc5a4
31b0b721c8 b97e33f5d5 29e55d3664 9112f27dc0 9e67df26b3 37d1a577ef 1eebb31004 885764ea1c
b3d184b5c7 96d04ec17e e0bb3ad609 1a8842cb81 092d526749 9db95c188a 0e45fc7d66 4d1ddbfa2b
caa1c6f1bd 10d686b415 29fae90da5 e27337da85 8f22316869 dd10d3e037 4a53c62af8 1ebbf2b693
62d198111c 1fc0ab71aa f4402a1633 13a17bcb34 e1b49d90c2 85ab25ea16 80131ddfa8 e9c123459f
d3e4bb7219 fd98d38125 3237611034 ce2bffda15 977e7b7adc 5082078527 3615091c93 fb1eb44d82
13910d44bf cda1d15070 d0a1de23a6 44fd75220f ed594d653f 40bb3a7581 df7f0345e8 f7ab76bb9a
45e24bfa65 8cd149783c 8e8e6e48a9 816e0d503a c43acd50f4 16ce4296a2 65386b753f 2be1529cb8
98f8e032e3 900b785789 9194088947 58c40cbef6 e6c57dfc80 82f76f60bd b9af4aec6b f71ce27248
5b2b1765bc 2a892544c2 bedb37ca6b a456cd645f 9c68094cf6 379cef9e35 cb3e2dab71 3e89f83e0b
af0bdd890a f93f5d0e71 667672a20b 9e1f899274 75e0741665 392d0e929b b342073ba9 ff4e550ba3
17aa544be5 390676dbc4 d423252bc7 790e887b70 47e377683e b1232c0d8d 059233c111 55382d000b
75ab6eec43 e30171746b 73849b7468 a52713611c 85a66c663c e478e68b70 16095c319a f4a6100fba
82060dd242 a58cfb797c c8256a50f4 3ae974e9e2 ac5e74a375 05e3d3b779 681a1ecff5 2b411b0bf9
fee16847d3 501a52a3c6 2bb014fda5 09203f67b2 169c7ec004 202e94615e 5fe2a815ad a13a0b4770
455bbc457b d50fd3b580 455e80b07c 291becbdf9 33385b46a7 df17668369 43449c85bb 9e86eda05a
b288554d9c bee55d08fb 7a542aeb38 8d42513ba8 89b7247aa2 29132e7f4c 3fd9baf78e f3aa3757ce
3760967f59 f7ab8e0f7f 0e46ea12b2 be226b2b01 9e1239e192 2eba3d85b0 9b569268ab 31f5033dca
78d9c60be5 baa86f09e5 a4c4b39ba8 752068bb56 739cfbb273 115af4cadf ae3274e559 c61f096dbd
ee7b5c42fd 85d527bfbc dd561da819 cb5932cb8b 8630adc54a 90d8832cd2 3802b97bb6 2de175e181
6b7d437b00 e2faf906de bb44ce5cd2 15544ae589 e421284471 785dc64787 7e7e3a7876 2b45c059e0
14ec61f9bd 5cc72756f8 44785ef3e2 e60d858feb b31ecfefcd c342231052 673666837e c8f74d6c0d
7ed9de8014 8650947f04 a0ac8ced31 2145bbea81 480000ee7f 9ec2ad022e 43e40816dc 941ea3ee68
a6e4b5159e 6f542d58d5 b2b5fcee7d 59a82345a9 b61a747876 72e5d800d5 c7aa6d4804 b31063449d
477672459d 9c33897296 100cfb57c5 40b34071e7 341833fd8f f647fd6fea 53642f2389 b9bdd655ab
e9e1b5b54f be2163d635 7f6dde3a5f 334aafee23 1a20c18b19 6e655b165c d768b2fa1e 85bce1cfba
a798a2466f 2a5d8a5c82 ea62171d98 196389d5ee 1776021620 c42a9124d3 a44647b4cd e0c3fd87c5
ed8f2a85b7 48f8553c75 af4517fd1e 78e6a46318 49ca923e51 7ad22e0399 bb8acc6065 c0fa4a19e9
3f1741dd18 9ef02e4110 15a6f38ebb 227f2e5a21 517d6ee981 184eeb7f49 a3555c74e8 657bafd458
2ad5df420c b24d489ec5 6bb0210f1f c1de50266a 562e02bc64 f71ec7913a d98baaf660 72db591576
509a37fc04 17f62b6e86 b09aee7644 babcc0de0c 5cc47c9222 833559a3b3 b8a976b344 10b14132b9
18953f0b7c 19f5fba3aa 636bc3e61a 521037e1a6 e024c3e38d 6a0206c1e7 69a8a83528 0307d700fa
919c383b41 d331af4d5a b5467d3c23 3ef0040d66 ec4dfd2172 8f54d7c8e9 b59e709dc0 b236e6c886
8acbb7d6f0 49e4bc9381 36106cc08d 8f1763abe2 480eebc6cb dccfffd979 1a978f4762 1fbc8f4060
db3fc1421c 9ecd03db0e f111ccb1b6 a32341cc5d f73e277230 8d8587ca29 88eb9511bf e1068997ea
560e04c64a 621ec03971 d14a47d3f7 3e9de0c210 01a6e074a5 1434077f4e 3922175af1 ec6852a8d7
b0b908b4ae 5a00336ef1 d53f5e21f4 bd173fa333 6b32fa31b6 d1dba89e39 b2f2806465 3b70cd58a3
6769bfd824 bff4af2534 aabf575ac5 4aacaf6bd6 268070e89c 4fa2134cc6 97c35de49a 32fb550969
3457441929 32f0fc7a46 c891a8f164 991764af94 1df447272e f032ae757b e529b2859e d1cb2368fa
7b812184d1 4f7ce1c6ee 44a5ac63db 0989ee88cc ba8b72c2c8 2dbb4583f4 81ad42e029 5d3695f8ba
c771694aa0 1ac0fd4c10 631ff8caef 7382182132 4ff9da68ef dee2998ee3 1d43236211 792bc610a3
7a51c828c2 fab6fcd5ac c8e00ba160 6245b6d823 0c55bf20fc f8fd7b5933 6462eea2ef 3d79891249
.devcontainer/Dockerfile  (deleted)
@@ -1,49 +0,0 @@
-FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.8
-
-WORKDIR /workspaces
-
-# Install Node/Yarn for Frontent
-RUN apt-get update && apt-get install -y --no-install-recommends \
-    curl \
-    git \
-    apt-utils \
-    apt-transport-https \
-    && curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
-    && echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
-    && apt-get update && apt-get install -y --no-install-recommends \
-    nodejs \
-    yarn \
-    && curl -o - https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash \
-    && rm -rf /var/lib/apt/lists/*
-ENV NVM_DIR /root/.nvm
-
-# Install docker
-# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
-RUN apt-get update && apt-get install -y --no-install-recommends \
-    apt-transport-https \
-    ca-certificates \
-    curl \
-    software-properties-common \
-    gpg-agent \
-    && curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
-    && add-apt-repository "deb https://download.docker.com/linux/debian $(lsb_release -cs) stable" \
-    && apt-get update && apt-get install -y --no-install-recommends \
-    docker-ce \
-    docker-ce-cli \
-    containerd.io \
-    && rm -rf /var/lib/apt/lists/*
-
-# Install tools
-RUN apt-get update && apt-get install -y --no-install-recommends \
-    jq \
-    dbus \
-    network-manager \
-    libpulse0 \
-    && rm -rf /var/lib/apt/lists/*
-
-# Install Python dependencies from requirements.txt if it exists
-COPY requirements.txt requirements_tests.txt ./
-RUN pip3 install -U setuptools pip \
-    && pip3 install -r requirements.txt -r requirements_tests.txt \
-    && pip3 install tox \
-    && rm -f requirements.txt requirements_tests.txt
.devcontainer/devcontainer.json
@@ -1,9 +1,8 @@
 {
   "name": "Supervisor dev",
-  "context": "..",
-  "dockerFile": "Dockerfile",
-  "appPort": "9123:8123",
-  "postCreateCommand": "pre-commit install",
+  "image": "ghcr.io/home-assistant/devcontainer:supervisor",
+  "appPort": ["9123:8123", "7357:4357"],
+  "postCreateCommand": "bash devcontainer_bootstrap",
   "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
   "extensions": [
     "ms-python.python",
@@ -12,7 +11,12 @@
     "esbenp.prettier-vscode"
   ],
   "settings": {
-    "terminal.integrated.shell.linux": "/bin/bash",
+    "terminal.integrated.profiles.linux": {
+      "zsh": {
+        "path": "/usr/bin/zsh"
+      }
+    },
+    "terminal.integrated.defaultProfile.linux": "zsh",
     "editor.formatOnPaste": false,
     "editor.formatOnSave": true,
     "editor.formatOnType": true,
@@ -21,7 +25,7 @@
     "python.linting.pylintEnabled": true,
     "python.linting.enabled": true,
     "python.formatting.provider": "black",
-    "python.formatting.blackArgs": ["--target-version", "py38"],
+    "python.formatting.blackArgs": ["--target-version", "py39"],
     "python.formatting.blackPath": "/usr/local/bin/black",
     "python.linting.banditPath": "/usr/local/bin/bandit",
     "python.linting.flake8Path": "/usr/local/bin/flake8",
.github/ISSUE_TEMPLATE/bug_report.md
@@ -1,5 +1,5 @@
 ---
-name: Report a bug with the Supervisor
+name: Report a bug with the Supervisor on a supported System
 about: Report an issue related to the Home Assistant Supervisor.
 labels: bug
 ---
@@ -11,6 +11,10 @@ labels: bug
 - If you have a problem with an add-on, make an issue in it's repository.
 -->

+<!--
+Important: You can only file a bug report for a supported system! If you run an unsupported installation, this report will be closed without comment.
+-->
+
 ### Describe the issue

 <!-- Provide as many details as possible. -->
@@ -47,3 +51,19 @@ Paste supervisor logs here
 ```

 </details>

+### System Information
+
+<details>
+<summary>System Information</summary>
+<!--
+- Use this command: ha info
+-->
+
+```
+Paste system info here
+
+```
+
+</details>
98  .github/ISSUE_TEMPLATE/bug_report.yml  (vendored, new file)
@@ -0,0 +1,98 @@
+name: Bug Report Form
+description: Report an issue related to the Home Assistant Supervisor.
+labels: bug
+body:
+  - type: markdown
+    attributes:
+      value: |
+        This issue form is for reporting bugs with **supported** setups only!
+
+        If you have a feature or enhancement request, please use the [feature request][fr] section of our [Community Forum][fr].
+
+        [fr]: https://community.home-assistant.io/c/feature-requests
+  - type: textarea
+    validations:
+      required: true
+    attributes:
+      label: Describe the issue you are experiencing
+      description: Provide a clear and concise description of what the bug is.
+  - type: markdown
+    attributes:
+      value: |
+        ## Environment
+  - type: input
+    validations:
+      required: true
+    attributes:
+      label: What is the used version of the Supervisor?
+      placeholder: supervisor-
+      description: >
+        Can be found in the Supervisor panel -> System tab. Starts with
+        `supervisor-....`.
+  - type: dropdown
+    validations:
+      required: true
+    attributes:
+      label: What type of installation are you running?
+      description: >
+        If you don't know, you can find it in: Configuration panel -> Info.
+      options:
+        - Home Assistant OS
+        - Home Assistant Supervised
+  - type: dropdown
+    validations:
+      required: true
+    attributes:
+      label: Which operating system are you running on?
+      options:
+        - Home Assistant Operating System
+        - Debian
+        - Other (e.g., Raspbian/Raspberry Pi OS/Fedora)
+  - type: input
+    validations:
+      required: true
+    attributes:
+      label: What is the version of your installed operating system?
+      placeholder: "5.11"
+      description: Can be found in the Supervisor panel -> System tab.
+  - type: input
+    validations:
+      required: true
+    attributes:
+      label: What version of Home Assistant Core is installed?
+      placeholder: core-
+      description: >
+        Can be found in the Supervisor panel -> System tab. Starts with
+        `core-....`.
+  - type: markdown
+    attributes:
+      value: |
+        # Details
+  - type: textarea
+    validations:
+      required: true
+    attributes:
+      label: Steps to reproduce the issue
+      description: |
+        Please tell us exactly how to reproduce your issue.
+        Provide clear and concise step by step instructions and add code snippets if needed.
+      value: |
+        1.
+        2.
+        3.
+        ...
+  - type: textarea
+    validations:
+      required: true
+    attributes:
+      label: Anything in the Supervisor logs that might be useful for us?
+      description: >
+        The Supervisor logs can be found in the Supervisor panel -> System tab.
+      render: txt
+  - type: textarea
+    attributes:
+      label: Additional information
+      description: >
+        If you have any additional information for us, use the field below.
+        Please note, you can attach screenshots or screen recordings here, by
+        dragging and dropping files in the field below.
4  .github/ISSUE_TEMPLATE/config.yml  (vendored)
@@ -1,5 +1,9 @@
 blank_issues_enabled: false
 contact_links:
+  - name: Report a bug/issues with an unsupported Supervisor
+    url: https://community.home-assistant.io
+    about: The Community guide can help or was updated to solve your issue
+
   - name: Report a bug for the Supervisor panel
     url: https://github.com/home-assistant/frontend/issues
     about: The Supervisor panel is a part of the Home Assistant frontend
1  .github/PULL_REQUEST_TEMPLATE.md  (vendored)
@@ -37,6 +37,7 @@
 - This PR fixes or closes issue: fixes #
 - This PR is related to issue:
 - Link to documentation pull request:
+- Link to cli pull request:

 ## Checklist

47  .github/release-drafter.yml  (vendored)
@@ -1,4 +1,49 @@
 change-template: "- #$NUMBER $TITLE @$AUTHOR"
 sort-direction: ascending

+categories:
+  - title: ":boom: Breaking Changes"
+    label: "breaking-change"
+
+  - title: ":wrench: Build"
+    label: "build"
+
+  - title: ":boar: Chore"
+    label: "chore"
+
+  - title: ":sparkles: New Features"
+    label: "new-feature"
+
+  - title: ":zap: Performance"
+    label: "performance"
+
+  - title: ":recycle: Refactor"
+    label: "refactor"
+
+  - title: ":green_heart: CI"
+    label: "ci"
+
+  - title: ":bug: Bug Fixes"
+    label: "bugfix"
+
+  - title: ":white_check_mark: Test"
+    label: "test"
+
+  - title: ":arrow_up: Dependency Updates"
+    label: "dependencies"
+
+include-labels:
+  - "breaking-change"
+  - "build"
+  - "chore"
+  - "performance"
+  - "refactor"
+  - "new-feature"
+  - "bugfix"
+  - "dependencies"
+  - "test"
+  - "ci"
+
 template: |
   ## What's Changed

   $CHANGES
138  .github/workflows/builder.yml  (vendored)
@@ -27,7 +27,7 @@ on:
     paths:
       - "rootfs/**"
       - "supervisor/**"
-      - build.json
+      - build.yaml
      - Dockerfile
      - requirements.txt
      - setup.py
@@ -35,6 +35,7 @@ on:
 env:
   BUILD_NAME: supervisor
   BUILD_TYPE: supervisor
+  WHEELS_TAG: 3.9-alpine3.14

 jobs:
   init:
@@ -45,9 +46,10 @@ jobs:
       version: ${{ steps.version.outputs.version }}
       channel: ${{ steps.version.outputs.channel }}
       publish: ${{ steps.version.outputs.publish }}
+      requirements: ${{ steps.requirements.outputs.changed }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
         with:
           fetch-depth: 0

@@ -61,6 +63,18 @@ jobs:
         with:
           type: ${{ env.BUILD_TYPE }}

+      - name: Get changed files
+        id: changed_files
+        if: steps.version.outputs.publish == 'false'
+        uses: jitterbit/get-changed-files@v1
+
+      - name: Check if requirements files changed
+        id: requirements
+        run: |
+          if [[ "${{ steps.changed_files.outputs.all }}" =~ (requirements.txt|build.json) ]]; then
+            echo "::set-output name=changed::true"
+          fi
+
   build:
     name: Build ${{ matrix.arch }} supervisor
     needs: init
@@ -70,10 +84,23 @@ jobs:
       arch: ${{ fromJson(needs.init.outputs.architectures) }}
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
         with:
           fetch-depth: 0

+      - name: Build wheels
+        if: needs.init.outputs.requirements == 'true'
+        uses: home-assistant/wheels@master
+        with:
+          tag: ${{ env.WHEELS_TAG }}
+          arch: ${{ matrix.arch }}
+          wheels-host: wheels.hass.io
+          wheels-key: ${{ secrets.WHEELS_KEY }}
+          wheels-user: wheels
+          apk: "build-base;libffi-dev;openssl-dev;cargo"
+          skip-binary: aiohttp
+          requirements: "requirements.txt"
+
       - name: Set version
         if: needs.init.outputs.publish == 'true'
         uses: home-assistant/actions/helpers/version@master
@@ -82,23 +109,59 @@ jobs:

       - name: Login to DockerHub
         if: needs.init.outputs.publish == 'true'
-        uses: docker/login-action@v1
+        uses: docker/login-action@v1.12.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

+      - name: Login to GitHub Container Registry
+        if: needs.init.outputs.publish == 'true'
+        uses: docker/login-action@v1.12.0
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
       - name: Set build arguments
         if: needs.init.outputs.publish == 'false'
         run: echo "BUILD_ARGS=--test" >> $GITHUB_ENV

       - name: Build supervisor
-        uses: home-assistant/builder@2020.11.0
+        uses: home-assistant/builder@2021.12.0
         with:
           args: |
             $BUILD_ARGS \
             --${{ matrix.arch }} \
             --target /data \
             --generic ${{ needs.init.outputs.version }}
+        env:
+          CAS_API_KEY: ${{ secrets.CAS_TOKEN }}
+
+  codenotary:
+    name: CodeNotary signature
+    needs: init
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout the repository
+        if: needs.init.outputs.publish == 'true'
+        uses: actions/checkout@v2.4.0
+        with:
+          fetch-depth: 0
+
+      - name: Set version
+        if: needs.init.outputs.publish == 'true'
+        uses: home-assistant/actions/helpers/version@master
+        with:
+          type: ${{ env.BUILD_TYPE }}
+
+      - name: Signing image
+        if: needs.init.outputs.publish == 'true'
+        uses: home-assistant/actions/helpers/codenotary@master
+        with:
+          source: dir://${{ github.workspace }}
+          user: ${{ secrets.VCN_USER }}
+          password: ${{ secrets.VCN_PASSWORD }}
+          organisation: ${{ secrets.VCN_ORG }}

   version:
     name: Update version
@@ -107,7 +170,7 @@ jobs:
     steps:
       - name: Checkout the repository
         if: needs.init.outputs.publish == 'true'
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0

       - name: Initialize git
         if: needs.init.outputs.publish == 'true'
@@ -128,13 +191,15 @@ jobs:
   run_supervisor:
     runs-on: ubuntu-latest
     name: Run the Supervisor
-    needs: ["build"]
+    needs: ["build", "codenotary", "init"]
+    timeout-minutes: 60
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0

       - name: Build the Supervisor
-        uses: home-assistant/builder@2020.11.0
+        if: needs.init.outputs.publish != 'true'
+        uses: home-assistant/builder@2021.12.0
         with:
           args: |
             --test \
@@ -142,13 +207,19 @@ jobs:
             --target /data \
             --generic runner

+      - name: Pull Supervisor
+        if: needs.init.outputs.publish == 'true'
+        run: |
+          docker pull ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }}
+          docker tag ghcr.io/home-assistant/amd64-hassio-supervisor:${{ needs.init.outputs.version }} homeassistant/amd64-hassio-supervisor:runner
+
       - name: Create the Supervisor
         run: |
           mkdir -p /tmp/supervisor/data
           docker create --name hassio_supervisor \
             --privileged \
             --security-opt seccomp=unconfined \
-            --security-opt apparmor:unconfined \
+            --security-opt apparmor=unconfined \
             -v /run/docker.sock:/run/docker.sock \
             -v /run/dbus:/run/dbus \
             -v /tmp/supervisor/data:/data \
@@ -167,22 +238,59 @@ jobs:
           SUPERVISOR=$(docker inspect --format='{{.NetworkSettings.IPAddress}}' hassio_supervisor)
           ping="error"
           while [ "$ping" != "ok" ]; do
-            ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r .result)
+            ping=$(curl -sSL "http://$SUPERVISOR/supervisor/ping" | jq -r '.result')
             sleep 5
           done

       - name: Check the Supervisor
         run: |
           echo "Checking supervisor info"
-          test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r .result)
+          test=$(docker exec hassio_cli ha supervisor info --no-progress --raw-json | jq -r '.result')
           if [ "$test" != "ok" ];then
             docker logs hassio_supervisor
             exit 1
           fi

           echo "Checking supervisor network info"
-          test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r .result)
+          test=$(docker exec hassio_cli ha network info --no-progress --raw-json | jq -r '.result')
           if [ "$test" != "ok" ];then
             docker logs hassio_supervisor
             exit 1
           fi

+      - name: Check the Store / Addon
+        run: |
+          echo "Install Core SSH Add-on"
+          test=$(docker exec hassio_cli ha addons install core_ssh --no-progress --raw-json | jq -r '.result')
+          if [ "$test" != "ok" ];then
+            exit 1
+          fi
+
+          echo "Start Core SSH Add-on"
+          test=$(docker exec hassio_cli ha addons start core_ssh --no-progress --raw-json | jq -r '.result')
+          if [ "$test" != "ok" ];then
+            exit 1
+          fi
+
+      - name: Check the Supervisor code sign
+        if: needs.init.outputs.publish == 'true'
+        run: |
+          echo "Enable Content-Trust"
+          test=$(docker exec hassio_cli ha security options --content-trust=true --no-progress --raw-json | jq -r '.result')
+          if [ "$test" != "ok" ];then
+            exit 1
+          fi
+
+          echo "Run supervisor health check"
+          test=$(docker exec hassio_cli ha resolution healthcheck --no-progress --raw-json | jq -r '.result')
+          if [ "$test" != "ok" ];then
+            exit 1
+          fi
+
+          echo "Check supervisor unhealthy"
+          test=$(docker exec hassio_cli ha resolution info --no-progress --raw-json | jq -r '.data.unhealthy[]')
+          if [ "$test" != "" ];then
+            exit 1
+          fi
+
+      - name: Get supervisor logs on failure
+        if: ${{ cancelled() || failure() }}
+        run: docker logs hassio_supervisor
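The `run_supervisor` job above waits for the API by polling `/supervisor/ping` and reading `.result` with jq. A minimal standalone sketch of that pattern, with a canned payload standing in for the live endpoint (the JSON body here is an assumption, not captured Supervisor output):

```bash
#!/bin/bash
# Poll-until-ok pattern from the run_supervisor job, run against a stub.
fake_ping() { echo '{"result": "ok"}'; }   # stands in for: curl -sSL "http://$SUPERVISOR/supervisor/ping"

ping="error"
while [ "$ping" != "ok" ]; do
    ping=$(fake_ping | jq -r '.result')    # single quotes keep the shell out of the jq filter
    sleep 1
done
echo "Supervisor answered ok"
```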
19  .github/workflows/check_pr_labels.yml  (vendored, new file)
@@ -0,0 +1,19 @@
+name: Check PR
+
+on:
+  pull_request:
+    branches: ["main"]
+    types: [labeled, unlabeled, synchronize]
+
+jobs:
+  init:
+    name: Check labels
+    runs-on: ubuntu-latest
+    steps:
+      - name: Check labels
+        run: |
+          labels=$(jq -r '.pull_request.labels[] | .name' ${{ github.event_path }})
+          echo "$labels"
+          if [ "$labels" == "cla-signed" ]; then
+            exit 1
+          fi
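The new workflow fails when `cla-signed` is the only label on the pull request, forcing a release-drafter category label before merge. A sketch of the same jq extraction fed a fabricated event payload instead of `${{ github.event_path }}` (the payload content is an assumption):

```bash
#!/bin/bash
# Label check from check_pr_labels.yml, run against a made-up event file.
event=$(mktemp)
cat > "$event" <<'EOF'
{"pull_request": {"labels": [{"name": "cla-signed"}]}}
EOF

labels=$(jq -r '.pull_request.labels[] | .name' "$event")   # one label name per line
echo "$labels"
if [ "$labels" == "cla-signed" ]; then
    echo "Only cla-signed is set; fail so the PR cannot merge without a type label."
    exit 1
fi
```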
90  .github/workflows/ci.yaml  (vendored)
@@ -8,8 +8,9 @@ on:
   pull_request: ~

 env:
-  DEFAULT_PYTHON: 3.8
+  DEFAULT_PYTHON: 3.9
   PRE_COMMIT_HOME: ~/.cache/pre-commit
+  DEFAULT_VCN: v0.9.8

 jobs:
   # Separate job to pre-populate the base dependency cache
@@ -18,26 +19,23 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [3.8]
+        python-version: [3.9]
     name: Prepare Python ${{ matrix.python-version }} dependencies
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Set up Python ${{ matrix.python-version }}
         id: python
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.3.1
         with:
           python-version: ${{ matrix.python-version }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: venv
           key: |
             ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
-          restore-keys: |
-            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
-            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-
       - name: Create Python virtual environment
         if: steps.cache-venv.outputs.cache-hit != 'true'
         run: |
@@ -47,7 +45,7 @@ jobs:
           pip install -r requirements.txt -r requirements_tests.txt
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: ${{ env.PRE_COMMIT_HOME }}
           key: |
@@ -66,15 +64,15 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.3.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: venv
           key: |
@@ -95,7 +93,7 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Register hadolint problem matcher
         run: |
           echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -110,15 +108,15 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.3.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: venv
           key: |
@@ -130,7 +128,7 @@
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: ${{ env.PRE_COMMIT_HOME }}
           key: |
@@ -154,15 +152,15 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.3.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: venv
           key: |
@@ -186,15 +184,15 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.3.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: venv
           key: |
@@ -206,7 +204,7 @@
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: ${{ env.PRE_COMMIT_HOME }}
           key: |
@@ -227,15 +225,15 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.3.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: venv
           key: |
@@ -247,7 +245,7 @@
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: ${{ env.PRE_COMMIT_HOME }}
           key: |
@@ -271,15 +269,15 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.3.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: venv
           key: |
@@ -303,15 +301,15 @@
     needs: prepare
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.3.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: venv
           key: |
@@ -323,7 +321,7 @@
           exit 1
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: ${{ env.PRE_COMMIT_HOME }}
           key: |
@@ -343,19 +341,23 @@
     needs: prepare
     strategy:
       matrix:
-        python-version: [3.8]
+        python-version: [3.9]
     name: Run tests Python ${{ matrix.python-version }}
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Set up Python ${{ matrix.python-version }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.3.1
         id: python
         with:
           python-version: ${{ matrix.python-version }}
+      - name: Install VCN tools
+        uses: home-assistant/actions/helpers/vcn@master
+        with:
+          vcn_version: ${{ env.DEFAULT_VCN }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: venv
           key: |
@@ -390,7 +392,7 @@
           -o console_output_style=count \
           tests
       - name: Upload coverage artifact
-        uses: actions/upload-artifact@v2.2.1
+        uses: actions/upload-artifact@v2.3.1
         with:
           name: coverage-${{ matrix.python-version }}
           path: .coverage
@@ -401,15 +403,15 @@
     needs: pytest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v2.1.4
+        uses: actions/setup-python@v2.3.1
         id: python
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
       - name: Restore Python virtual environment
         id: cache-venv
-        uses: actions/cache@v2
+        uses: actions/cache@v2.1.7
         with:
           path: venv
           key: |
@@ -428,4 +430,4 @@
           coverage report
           coverage xml
       - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v1.0.15
+        uses: codecov/codecov-action@v2.1.0
10  .github/workflows/lock.yml  (vendored)
@@ -9,12 +9,12 @@ jobs:
   lock:
     runs-on: ubuntu-latest
     steps:
-      - uses: dessant/lock-threads@v2.0.1
+      - uses: dessant/lock-threads@v3
        with:
          github-token: ${{ github.token }}
-          issue-lock-inactive-days: "30"
-          issue-exclude-created-before: "2020-10-01T00:00:00Z"
+          issue-inactive-days: "30"
+          exclude-issue-created-before: "2020-10-01T00:00:00Z"
          issue-lock-reason: ""
-          pr-lock-inactive-days: "1"
-          pr-exclude-created-before: "2020-11-01T00:00:00Z"
+          pr-inactive-days: "1"
+          exclude-pr-created-before: "2020-11-01T00:00:00Z"
          pr-lock-reason: ""
33  .github/workflows/release-drafter.yml  (vendored)
@@ -2,14 +2,43 @@ name: Release Drafter

 on:
   push:
     # branches to consider in the event; optional, defaults to all
     branches:
       - main

 jobs:
   update_release_draft:
     runs-on: ubuntu-latest
     name: Release Drafter
     steps:
-      - uses: release-drafter/release-drafter@v5
+      - name: Checkout the repository
+        uses: actions/checkout@v2.4.0
+        with:
+          fetch-depth: 0
+
+      - name: Find Next Version
+        id: version
+        run: |
+          declare -i newpost
+          latest=$(git describe --tags $(git rev-list --tags --max-count=1))
+          latestpre=$(echo "$latest" | awk '{split($0,a,"."); print a[1] "." a[2]}')
+          datepre=$(date --utc '+%Y.%m')
+
+          if [[ "$latestpre" == "$datepre" ]]; then
+            latestpost=$(echo "$latest" | awk '{split($0,a,"."); print a[3]}')
+            newpost=$latestpost+1
+          else
+            newpost=0
+          fi
+
+          echo Current version: $latest
+          echo New target version: $datepre.$newpost
+          echo "::set-output name=version::$datepre.$newpost"
+
+      - name: Run Release Drafter
+        uses: release-drafter/release-drafter@v5
+        with:
+          tag: ${{ steps.version.outputs.version }}
+          name: ${{ steps.version.outputs.version }}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
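The Find Next Version step above derives a CalVer tag: keep `YYYY.MM`, bump the patch field when the latest tag is from the current month, otherwise reset it to 0. A worked example with the inputs hard-coded as assumptions (the workflow reads them from `git describe` and `date --utc '+%Y.%m'` instead):

```bash
#!/bin/bash
# CalVer bump from the Find Next Version step, with assumed inputs.
declare -i newpost
latest="2021.12.2"   # assumed most recent tag
datepre="2021.12"    # assumed current UTC year.month

latestpre=$(echo "$latest" | awk '{split($0,a,"."); print a[1] "." a[2]}')

if [[ "$latestpre" == "$datepre" ]]; then
    # Same month as the last release: bump the third field.
    latestpost=$(echo "$latest" | awk '{split($0,a,"."); print a[3]}')
    newpost=$latestpost+1   # declare -i makes this an arithmetic assignment: 2 -> 3
else
    newpost=0               # first release of a new month
fi

echo "New target version: $datepre.$newpost"   # prints: New target version: 2021.12.3
```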
4  .github/workflows/sentry.yaml  (vendored)
@@ -10,9 +10,9 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out code from GitHub
-        uses: actions/checkout@v2
+        uses: actions/checkout@v2.4.0
       - name: Sentry Release
-        uses: getsentry/action-release@v1.1
+        uses: getsentry/action-release@v1.1.6
         env:
           SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
           SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
2  .github/workflows/stale.yml  (vendored)
@@ -9,7 +9,7 @@ jobs:
   stale:
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/stale@v3.0.14
+      - uses: actions/stale@v4
        with:
          repo-token: ${{ secrets.GITHUB_TOKEN }}
          days-before-stale: 60
.hadolint.yaml
@@ -1,5 +1,6 @@
 ignored:
-  - DL3018
   - DL3003
   - DL3006
   - DL3013
+  - DL3018
+  - SC2155
.pre-commit-config.yaml
@@ -1,13 +1,13 @@
 repos:
   - repo: https://github.com/psf/black
-    rev: 20.8b1
+    rev: 21.12b0
     hooks:
       - id: black
         args:
           - --safe
           - --quiet
           - --target-version
-          - py38
+          - py39
         files: ^((supervisor|tests)/.+)?[^/]+\.py$
   - repo: https://gitlab.com/pycqa/flake8
     rev: 3.8.3
@@ -23,12 +23,12 @@ repos:
       - id: check-executables-have-shebangs
        stages: [manual]
      - id: check-json
-  - repo: https://github.com/pre-commit/mirrors-isort
-    rev: v4.3.21
+  - repo: https://github.com/PyCQA/isort
+    rev: 5.9.3
    hooks:
      - id: isort
  - repo: https://github.com/asottile/pyupgrade
-    rev: v2.6.2
+    rev: v2.31.0
    hooks:
      - id: pyupgrade
-        args: [--py37-plus]
+        args: [--py39-plus]
21  .vcnignore  (new file)
@@ -0,0 +1,21 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# Distribution / packaging
+*.egg-info/
+
+# General files
+.git
+.github
+.devcontainer
+.vscode
+.tox
+
+# Data
+home-assistant-polymer/
+script/
+tests/
+data/
+venv/
17  .vscode/tasks.json  (vendored)
@@ -2,9 +2,9 @@
   "version": "2.0.0",
   "tasks": [
     {
-      "label": "Run Testenv",
+      "label": "Run Supervisor",
       "type": "shell",
-      "command": "./scripts/test_env.sh",
+      "command": "supervisor_run",
       "group": {
         "kind": "test",
         "isDefault": true
@@ -16,7 +16,7 @@
       "problemMatcher": []
     },
     {
-      "label": "Run Testenv CLI",
+      "label": "Run Supervisor CLI",
       "type": "shell",
       "command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
       "group": {
@@ -30,9 +30,9 @@
       "problemMatcher": []
     },
     {
-      "label": "Update UI",
+      "label": "Update Supervisor Panel",
       "type": "shell",
-      "command": "./scripts/update-frontend.sh",
+      "command": "LOKALISE_TOKEN='${input:localiseToken}' ./scripts/update-frontend.sh",
       "group": {
         "kind": "build",
         "isDefault": true
@@ -86,5 +86,12 @@
       },
       "problemMatcher": []
     }
   ],
+  "inputs": [
+    {
+      "id": "localiseToken",
+      "type": "promptString",
+      "description": "Paste your lokalise token to download frontend translations"
+    }
+  ]
 }
12  Dockerfile
@@ -1,25 +1,25 @@
 ARG BUILD_FROM
-FROM $BUILD_FROM
+FROM ${BUILD_FROM}

 ENV \
     S6_SERVICES_GRACETIME=10000 \
     SUPERVISOR_API=http://localhost

+ARG BUILD_ARCH
+WORKDIR /usr/src
+
 # Install base
 RUN \
-    apk add --no-cache \
+    set -x \
+    && apk add --no-cache \
     eudev \
     eudev-libs \
     git \
     glib \
     libffi \
     libpulse \
     musl \
     openssl
-
-ARG BUILD_ARCH
-WORKDIR /usr/src

 # Install requirements
 COPY requirements.txt .
 RUN \
29  README.md
@@ -10,26 +10,23 @@ network settings or installing and updating software.

 ## Installation

-Installation instructions can be found at https://home-assistant.io/hassio.
+Installation instructions can be found at https://home-assistant.io/getting-started.

 ## Development

-The development of the Supervisor is not difficult but tricky.
-
-- You can use the builder to create your Supervisor: https://github.com/home-assistant/hassio-builder
-- Access a HassOS device or VM and pull your Supervisor.
-- Set the developer modus with the CLI tool: `ha supervisor options --channel=dev`
-- Tag it as `homeassistant/xy-hassio-supervisor:latest`
-- Restart the service with `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
-- Test your changes
-
-For small bugfixes or improvements, make a PR. For significant changes open a RFC first, please. Thanks.
+For small changes and bugfixes you can just follow this, but for significant changes open an RFC first.
+Development instructions can be found [here][development].

 ## Release

-Follow is the relase circle process:
+Releases are done in 3 stages (channels) with this structure:

-1. Merge master into dev / make sure version stay on dev
-2. Merge dev into master
-3. Bump the release on master
-4. Create a GitHub Release from master with the right version tag
+1. Pull requests are merged to the `main` branch.
+2. A new build is pushed to the `dev` stage.
+3. Releases are published.
+4. A new build is pushed to the `beta` stage.
+5. The [`stable.json`][stable] file is updated.
+6. The build that was pushed to `beta` will now be pushed to `stable`.
+
+[development]: https://developers.home-assistant.io/docs/supervisor/development
+[stable]: https://github.com/home-assistant/version/blob/master/stable.json
azure-pipelines-wheels.yml  (deleted)
@@ -1,26 +0,0 @@
-# https://dev.azure.com/home-assistant
-
-trigger:
-  batch: true
-  branches:
-    include:
-      - main
-pr: none
-variables:
-  - name: versionWheels
-    value: "1.13.0-3.8-alpine3.12"
-resources:
-  repositories:
-    - repository: azure
-      type: github
-      name: "home-assistant/ci-azure"
-      endpoint: "home-assistant"
-
-jobs:
-  - template: templates/azp-job-wheels.yaml@azure
-    parameters:
-      builderVersion: "$(versionWheels)"
-      builderApk: "build-base;libffi-dev;openssl-dev"
-      builderPip: "Cython"
-      skipBinary: "aiohttp"
-      wheelsRequirement: "requirements.txt"
13  build.json  (deleted)
@@ -1,13 +0,0 @@
-{
-  "image": "homeassistant/{arch}-hassio-supervisor",
-  "build_from": {
-    "aarch64": "homeassistant/aarch64-base-python:3.8-alpine3.12",
-    "armhf": "homeassistant/armhf-base-python:3.8-alpine3.12",
-    "armv7": "homeassistant/armv7-base-python:3.8-alpine3.12",
-    "amd64": "homeassistant/amd64-base-python:3.8-alpine3.12",
-    "i386": "homeassistant/i386-base-python:3.8-alpine3.12"
-  },
-  "labels": {
-    "io.hass.type": "supervisor"
-  }
-}
20  build.yaml  (new file)
@@ -0,0 +1,20 @@
+image: homeassistant/{arch}-hassio-supervisor
+shadow_repository: ghcr.io/home-assistant
+build_from:
+  aarch64: ghcr.io/home-assistant/aarch64-base-python:3.9-alpine3.14
+  armhf: ghcr.io/home-assistant/armhf-base-python:3.9-alpine3.14
+  armv7: ghcr.io/home-assistant/armv7-base-python:3.9-alpine3.14
+  amd64: ghcr.io/home-assistant/amd64-base-python:3.9-alpine3.14
+  i386: ghcr.io/home-assistant/i386-base-python:3.9-alpine3.14
+codenotary:
+  signer: notary@home-assistant.io
+  base_image: notary@home-assistant.io
+labels:
+  io.hass.type: supervisor
+  org.opencontainers.image.title: Home Assistant Supervisor
+  org.opencontainers.image.description: Container-based system for managing Home Assistant Core installation
+  org.opencontainers.image.source: https://github.com/home-assistant/supervisor
+  org.opencontainers.image.authors: The Home Assistant Authors
+  org.opencontainers.image.url: https://www.home-assistant.io/
+  org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
+  org.opencontainers.image.licenses: Apache License 2.0
Submodule home-assistant-polymer updated: 1d367eca69...2f9c088091
6  pylintrc
@@ -2,7 +2,10 @@
 reports=no
 jobs=2

-good-names=id,i,j,k,ex,Run,_,fp,T
+good-names=id,i,j,k,ex,Run,_,fp,T,os
+
+extension-pkg-whitelist=
+  ciso8601

 # Reasons disabled:
 # format - handled by black
@@ -37,6 +40,7 @@ disable=
   too-many-return-statements,
   too-many-statements,
   unused-argument,
+  consider-using-with

 [EXCEPTIONS]
 overgeneral-exceptions=Exception
requirements.txt
@@ -1,20 +1,22 @@
-aiohttp==3.7.3
-async_timeout==3.0.1
+aiohttp==3.8.1
+async_timeout==4.0.2
 atomicwrites==1.4.0
-attrs==20.3.0
+attrs==21.2.0
+awesomeversion==22.1.0
 brotli==1.0.9
 cchardet==2.1.7
-colorlog==4.6.2
+ciso8601==2.2.0
+colorlog==6.6.0
 cpe==1.2.1
-cryptography==3.2.1
-debugpy==1.2.0
-docker==4.4.0
-gitpython==3.1.11
-jinja2==2.11.2
-packaging==20.4
-pulsectl==20.5.1
-pytz==2020.4
+cryptography==36.0.1
+debugpy==1.5.1
+deepmerge==1.0.1
+docker==5.0.3
+gitpython==3.1.26
+jinja2==3.0.3
+pulsectl==21.10.5
 pyudev==0.22.0
-ruamel.yaml==0.15.100
-sentry-sdk==0.19.4
-voluptuous==0.12.0
+ruamel.yaml==0.17.17
+sentry-sdk==1.5.2
+voluptuous==0.12.2
+dbus-next==0.2.3
requirements_tests.txt
@@ -1,14 +1,14 @@
-black==20.8b1
-codecov==2.1.10
-coverage==5.3
-flake8-docstrings==1.5.0
-flake8==3.8.4
-pre-commit==2.9.2
-pydocstyle==5.1.1
-pylint==2.6.0
+black==21.12b0
+codecov==2.1.12
+coverage==6.2
+flake8-docstrings==1.6.0
+flake8==4.0.1
+pre-commit==2.17.0
+pydocstyle==6.1.1
+pylint==2.12.2
 pytest-aiohttp==0.3.0
 pytest-asyncio==0.12.0 # NB!: Versions over 0.12.0 breaks pytest-aiohttp (https://github.com/aio-libs/pytest-aiohttp/issues/16)
-pytest-cov==2.10.1
-pytest-timeout==1.4.2
-pytest==6.1.2
-pyupgrade==2.7.4
+pytest-cov==3.0.0
+pytest-timeout==2.0.2
+pytest==6.2.5
+pyupgrade==2.31.0
@@ -2,9 +2,17 @@
# ==============================================================================
# Start udev service
# ==============================================================================

if bashio::fs.directory_exists /run/udev && ! bashio::fs.file_exists /run/.old_udev; then
    bashio::log.info "Using udev information from host"
    bashio::exit.ok
fi

bashio::log.info "Setup udev backend inside container"
udevd --daemon

bashio::log.info "Update udev information"
touch /run/.old_udev
if udevadm trigger; then
    udevadm settle || true
else
@@ -1,135 +0,0 @@
#!/bin/bash
set -eE

DOCKER_TIMEOUT=30
DOCKER_PID=0


function start_docker() {
    local starttime
    local endtime

    echo "Starting docker."
    dockerd 2> /dev/null &
    DOCKER_PID=$!

    echo "Waiting for docker to initialize..."
    starttime="$(date +%s)"
    endtime="$(date +%s)"
    until docker info >/dev/null 2>&1; do
        if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
            sleep 1
            endtime=$(date +%s)
        else
            echo "Timeout while waiting for docker to come up"
            exit 1
        fi
    done
    echo "Docker was initialized"
}


function stop_docker() {
    local starttime
    local endtime

    echo "Stopping in container docker..."
    if [ "$DOCKER_PID" -gt 0 ] && kill -0 "$DOCKER_PID" 2> /dev/null; then
        starttime="$(date +%s)"
        endtime="$(date +%s)"

        # Now wait for it to die
        kill "$DOCKER_PID"
        while kill -0 "$DOCKER_PID" 2> /dev/null; do
            if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
                sleep 1
                endtime=$(date +%s)
            else
                echo "Timeout while waiting for container docker to die"
                exit 1
            fi
        done
    else
        echo "Your host might have been left with unreleased resources"
    fi
}


function build_supervisor() {
    docker pull homeassistant/amd64-builder:dev

    docker run --rm --privileged \
        -v /run/docker.sock:/run/docker.sock -v "$(pwd):/data" \
        homeassistant/amd64-builder:dev \
        --generic dev -t /data --test --amd64 --no-cache
}


function cleanup_lastboot() {
    if [[ -f /workspaces/test_supervisor/config.json ]]; then
        echo "Cleaning up last boot"
        cp /workspaces/test_supervisor/config.json /tmp/config.json
        jq -rM 'del(.last_boot)' /tmp/config.json > /workspaces/test_supervisor/config.json
        rm /tmp/config.json
    fi
}


function cleanup_docker() {
    echo "Cleaning up stopped containers..."
    docker rm $(docker ps -a -q) || true
}


function setup_test_env() {
    mkdir -p /workspaces/test_supervisor

    echo "Start Supervisor"
    docker run --rm --privileged \
        --name hassio_supervisor \
        --security-opt seccomp=unconfined \
        --security-opt apparmor:unconfined \
        -v /run/docker.sock:/run/docker.sock \
        -v /run/dbus:/run/dbus \
        -v "/workspaces/test_supervisor":/data \
        -v /etc/machine-id:/etc/machine-id:ro \
        -e SUPERVISOR_SHARE="/workspaces/test_supervisor" \
        -e SUPERVISOR_NAME=hassio_supervisor \
        -e SUPERVISOR_DEV=1 \
        -e SUPERVISOR_MACHINE="qemux86-64" \
        homeassistant/amd64-hassio-supervisor:latest

}


function init_dbus() {
    if pgrep dbus-daemon; then
        echo "Dbus is running"
        return 0
    fi

    echo "Startup dbus"
    mkdir -p /var/lib/dbus
    cp -f /etc/machine-id /var/lib/dbus/machine-id

    # cleanups
    mkdir -p /run/dbus
    rm -f /run/dbus/pid

    # run
    dbus-daemon --system --print-address
}

echo "Start Test-Env"

start_docker
trap "stop_docker" ERR

docker system prune -f

build_supervisor
cleanup_lastboot
cleanup_docker
init_dbus
setup_test_env
stop_docker
@@ -1,4 +1,6 @@
#!/bin/bash
source "/etc/supervisor_scripts/common"

set -e

# Update frontend
@@ -9,6 +11,10 @@ cd home-assistant-polymer
nvm install
script/bootstrap

# Download translations
start_docker
./script/translations_download

# build frontend
cd hassio
./script/build_hassio
@@ -16,3 +22,9 @@ cd hassio
# Copy frontend
rm -rf ../../supervisor/api/panel/*
cp -rf build/* ../../supervisor/api/panel/

# Reset frontend git
cd ..
git reset --hard HEAD

stop_docker
@@ -4,9 +4,8 @@ include_trailing_comma=True
force_grid_wrap=0
line_length=88
indent = " "
not_skip = __init__.py
force_sort_within_sections = true
sections = FUTURE,STDLIB,INBETWEENS,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
sections = FUTURE,STDLIB,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
default_section = THIRDPARTY
forced_separate = tests
combine_as_imports = true
6
setup.py
@@ -33,8 +33,9 @@ setup(
    packages=[
        "supervisor.addons",
        "supervisor.api",
        "supervisor.backups",
        "supervisor.dbus.network",
        "supervisor.dbus.payloads",
        "supervisor.dbus.network.setting",
        "supervisor.dbus",
        "supervisor.discovery.services",
        "supervisor.discovery",
@@ -44,11 +45,12 @@ setup(
        "supervisor.jobs",
        "supervisor.misc",
        "supervisor.plugins",
        "supervisor.resolution.checks",
        "supervisor.resolution.evaluations",
        "supervisor.resolution.fixups",
        "supervisor.resolution",
        "supervisor.services.modules",
        "supervisor.services",
        "supervisor.snapshots",
        "supervisor.store",
        "supervisor.utils",
        "supervisor",
@@ -39,6 +39,7 @@ if __name__ == "__main__":

    _LOGGER.info("Initializing Supervisor setup")
    coresys = loop.run_until_complete(bootstrap.initialize_coresys())
    loop.set_debug(coresys.config.debug)
    loop.run_until_complete(coresys.core.connect())

    bootstrap.supervisor_debugger(coresys)
@@ -3,13 +3,14 @@ import asyncio
from contextlib import suppress
import logging
import tarfile
from typing import Dict, List, Optional, Union
from typing import Optional, Union

from ..const import AddonBoot, AddonStartup, AddonState
from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import (
    AddonConfigurationError,
    AddonsError,
    AddonsJobError,
    AddonsNotSupportedError,
    CoreDNSError,
    DockerAPIError,
@@ -37,17 +38,17 @@ class AddonManager(CoreSysAttributes):
        """Initialize Docker base wrapper."""
        self.coresys: CoreSys = coresys
        self.data: AddonsData = AddonsData(coresys)
        self.local: Dict[str, Addon] = {}
        self.store: Dict[str, AddonStore] = {}
        self.local: dict[str, Addon] = {}
        self.store: dict[str, AddonStore] = {}

    @property
    def all(self) -> List[AnyAddon]:
    def all(self) -> list[AnyAddon]:
        """Return a list of all add-ons."""
        addons: Dict[str, AnyAddon] = {**self.store, **self.local}
        addons: dict[str, AnyAddon] = {**self.store, **self.local}
        return list(addons.values())

    @property
    def installed(self) -> List[Addon]:
    def installed(self) -> list[Addon]:
        """Return a list of all installed add-ons."""
        return list(self.local.values())

@@ -88,7 +89,7 @@ class AddonManager(CoreSysAttributes):

    async def boot(self, stage: AddonStartup) -> None:
        """Boot add-ons with mode auto."""
        tasks: List[Addon] = []
        tasks: list[Addon] = []
        for addon in self.installed:
            if addon.boot != AddonBoot.AUTO or addon.startup != stage:
                continue
@@ -122,7 +123,7 @@ class AddonManager(CoreSysAttributes):

    async def shutdown(self, stage: AddonStartup) -> None:
        """Shutdown addons."""
        tasks: List[Addon] = []
        tasks: list[Addon] = []
        for addon in self.installed:
            if addon.state != AddonState.STARTED or addon.startup != stage:
                continue
@@ -147,22 +148,22 @@ class AddonManager(CoreSysAttributes):
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ]
        ],
        on_condition=AddonsJobError,
    )
    async def install(self, slug: str) -> None:
        """Install an add-on."""
        if slug in self.local:
            _LOGGER.warning("Add-on %s is already installed", slug)
            return
            raise AddonsError(f"Add-on {slug} is already installed", _LOGGER.warning)
        store = self.store.get(slug)

        if not store:
            _LOGGER.error("Add-on %s not exists", slug)
            raise AddonsError()
            raise AddonsError(f"Add-on {slug} does not exist", _LOGGER.error)

        if not store.available:
            _LOGGER.error("Add-on %s not supported on that platform", slug)
            raise AddonsNotSupportedError()
            raise AddonsNotSupportedError(
                f"Add-on {slug} not supported on this platform", _LOGGER.error
            )

        self.data.install(store)
        addon = Addon(self.coresys, slug)
@@ -248,42 +249,51 @@ class AddonManager(CoreSysAttributes):
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ]
        ],
        on_condition=AddonsJobError,
    )
    async def update(self, slug: str) -> None:
    async def update(self, slug: str, backup: Optional[bool] = False) -> None:
        """Update add-on."""
        if slug not in self.local:
            _LOGGER.error("Add-on %s is not installed", slug)
            raise AddonsError()
            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
        addon = self.local[slug]

        if addon.is_detached:
            _LOGGER.error("Add-on %s is not available inside store", slug)
            raise AddonsError()
            raise AddonsError(
                f"Add-on {slug} is not available inside store", _LOGGER.error
            )
        store = self.store[slug]

        if addon.version == store.version:
            _LOGGER.warning("No update available for add-on %s", slug)
            return
            raise AddonsError(f"No update available for add-on {slug}", _LOGGER.warning)

        # Check if available, Maybe something have changed
        if not store.available:
            _LOGGER.error("Add-on %s not supported on that platform", slug)
            raise AddonsNotSupportedError()
            raise AddonsNotSupportedError(
                f"Add-on {slug} not supported on that platform", _LOGGER.error
            )

        if backup:
            await self.sys_backups.do_backup_partial(
                name=f"addon_{addon.slug}_{addon.version}",
                homeassistant=False,
                addons=[addon.slug],
            )

        # Update instance
        last_state: AddonState = addon.state
        old_image = addon.image
        try:
            await addon.instance.update(store.version, store.image)

            # Cleanup
            with suppress(DockerError):
                await addon.instance.cleanup()
        except DockerError as err:
            raise AddonsError() from err
        else:
            self.data.update(store)
            _LOGGER.info("Add-on '%s' successfully updated", slug)

        _LOGGER.info("Add-on '%s' successfully updated", slug)
        self.data.update(store)

        # Cleanup
        with suppress(DockerError):
            await addon.instance.cleanup(old_image=old_image)

        # Setup/Fix AppArmor profile
        await addon.install_apparmor()
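With the new backup flag, a partial backup of just the affected add-on is taken before its image is replaced. A hedged usage sketch based on the AddonModel.update() wrapper shown further below (the addon and coresys objects stand in for existing instances):

    # Take a partial backup of this add-on, then update it
    await addon.update(backup=True)

    # Equivalent call through the manager defined above
    await coresys.addons.update(addon.slug, backup=True)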
@@ -297,27 +307,30 @@ class AddonManager(CoreSysAttributes):
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ]
        ],
        on_condition=AddonsJobError,
    )
    async def rebuild(self, slug: str) -> None:
        """Perform a rebuild of local build add-on."""
        if slug not in self.local:
            _LOGGER.error("Add-on %s is not installed", slug)
            raise AddonsError()
            raise AddonsError(f"Add-on {slug} is not installed", _LOGGER.error)
        addon = self.local[slug]

        if addon.is_detached:
            _LOGGER.error("Add-on %s is not available inside store", slug)
            raise AddonsError()
            raise AddonsError(
                f"Add-on {slug} is not available inside store", _LOGGER.error
            )
        store = self.store[slug]

        # Check if a rebuild is possible now
        if addon.version != store.version:
            _LOGGER.error("Version changed, use Update instead Rebuild")
            raise AddonsError()
            raise AddonsError(
                "Version changed, use Update instead Rebuild", _LOGGER.error
            )
        if not addon.need_build:
            _LOGGER.error("Can't rebuild a image based add-on")
            raise AddonsNotSupportedError()
            raise AddonsNotSupportedError(
                "Can't rebuild a image based add-on", _LOGGER.error
            )

        # remove docker container but not addon config
        last_state: AddonState = addon.state
@@ -339,7 +352,8 @@ class AddonManager(CoreSysAttributes):
            JobCondition.FREE_SPACE,
            JobCondition.INTERNET_HOST,
            JobCondition.HEALTHY,
        ]
        ],
        on_condition=AddonsJobError,
    )
    async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
        """Restore state of an add-on."""
@@ -366,7 +380,7 @@ class AddonManager(CoreSysAttributes):
    @Job(conditions=[JobCondition.FREE_SPACE, JobCondition.INTERNET_HOST])
    async def repair(self) -> None:
        """Repair local add-ons."""
        needs_repair: List[Addon] = []
        needs_repair: list[Addon] = []

        # Evaluate Add-ons to repair
        for addon in self.installed:
@@ -10,9 +10,10 @@ import secrets
import shutil
import tarfile
from tempfile import TemporaryDirectory
from typing import Any, Awaitable, Dict, List, Optional
from typing import Any, Awaitable, Final, Optional

import aiohttp
from deepmerge import Merger
import voluptuous as vol
from voluptuous.humanize import humanize_error

@@ -22,6 +23,8 @@ from ..const import (
    ATTR_AUDIO_OUTPUT,
    ATTR_AUTO_UPDATE,
    ATTR_BOOT,
    ATTR_DATA,
    ATTR_EVENT,
    ATTR_IMAGE,
    ATTR_INGRESS_ENTRY,
    ATTR_INGRESS_PANEL,
@@ -32,8 +35,10 @@ from ..const import (
    ATTR_PORTS,
    ATTR_PROTECTED,
    ATTR_SCHEMA,
    ATTR_SLUG,
    ATTR_STATE,
    ATTR_SYSTEM,
    ATTR_TYPE,
    ATTR_USER,
    ATTR_UUID,
    ATTR_VERSION,
@@ -50,18 +55,22 @@ from ..exceptions import (
    AddonConfigurationError,
    AddonsError,
    AddonsNotSupportedError,
    ConfigurationFileError,
    DockerError,
    DockerRequestError,
    HostAppArmorError,
    JsonFileError,
)
from ..hardware.data import Device
from ..homeassistant.const import WSEvent, WSType
from ..utils import check_port
from ..utils.apparmor import adjust_profile
from ..utils.json import read_json_file, write_json_file
from ..utils.tar import atomic_contents_add, secure_path
from .const import AddonBackupMode
from .model import AddonModel, Data
from .options import AddonOptions
from .utils import remove_data
from .validate import SCHEMA_ADDON_SNAPSHOT, validate_options
from .validate import SCHEMA_ADDON_BACKUP

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -72,13 +81,19 @@ RE_WEBUI = re.compile(

RE_WATCHDOG = re.compile(
    r"^(?:(?P<s_prefix>https?|tcp)|\[PROTO:(?P<t_proto>\w+)\])"
    r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$"
    r":\/\/\[HOST\]:(?:\[PORT:)?(?P<t_port>\d+)\]?(?P<s_suffix>.*)$"
)
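The reworked RE_WATCHDOG makes the [PORT:...] wrapper optional, so the old and new watchdog URL templates resolve to the same named groups. A small check of that behavior (pattern copied from above; the sample URLs are illustrative):

    import re

    RE_WATCHDOG = re.compile(
        r"^(?:(?P<s_prefix>https?|tcp)|\[PROTO:(?P<t_proto>\w+)\])"
        r":\/\/\[HOST\]:(?:\[PORT:)?(?P<t_port>\d+)\]?(?P<s_suffix>.*)$"
    )

    # Old style with the [PORT:...] wrapper, and new style with a bare port
    assert RE_WATCHDOG.match("https://[HOST]:[PORT:8080]/health").group("t_port") == "8080"
    assert RE_WATCHDOG.match("tcp://[HOST]:1883").group("t_port") == "1883"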

RE_OLD_AUDIO = re.compile(r"\d+,\d+")

WATCHDOG_TIMEOUT = aiohttp.ClientTimeout(total=10)

_OPTIONS_MERGER: Final = Merger(
    type_strategies=[(dict, ["merge"])],
    fallback_strategies=["override"],
    type_conflict_strategies=["override"],
)
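_OPTIONS_MERGER recursively merges nested dicts, where the previous {**defaults, **user} spread replaced whole top-level keys. A minimal demonstration with the deepmerge library (the sample option dicts are illustrative):

    from deepmerge import Merger

    merger = Merger(
        type_strategies=[(dict, ["merge"])],
        fallback_strategies=["override"],
        type_conflict_strategies=["override"],
    )

    defaults = {"mqtt": {"host": "core-mosquitto", "port": 1883}}
    user = {"mqtt": {"port": 8883}}

    # A shallow spread loses the default host; the deep merge keeps it.
    assert {**defaults, **user}["mqtt"] == {"port": 8883}
    assert merger.merge(defaults, user)["mqtt"] == {"host": "core-mosquitto", "port": 8883}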

class Addon(AddonModel):
    """Hold data for add-on inside Supervisor."""
@@ -87,12 +102,34 @@ class Addon(AddonModel):
        """Initialize data holder."""
        super().__init__(coresys, slug)
        self.instance: DockerAddon = DockerAddon(coresys, self)
        self.state: AddonState = AddonState.UNKNOWN
        self._state: AddonState = AddonState.UNKNOWN

    def __repr__(self) -> str:
        """Return internal representation."""
        return f"<Addon: {self.slug}>"

    @property
    def state(self) -> AddonState:
        """Return state of the add-on."""
        return self._state

    @state.setter
    def state(self, new_state: AddonState) -> None:
        """Set the add-on into new state."""
        if self._state == new_state:
            return
        self._state = new_state
        self.sys_homeassistant.websocket.send_message(
            {
                ATTR_TYPE: WSType.SUPERVISOR_EVENT,
                ATTR_DATA: {
                    ATTR_EVENT: WSEvent.ADDON,
                    ATTR_SLUG: self.slug,
                    ATTR_STATE: new_state,
                },
            }
        )

    @property
    def in_progress(self) -> bool:
        """Return True if a task is in progress."""
@@ -101,7 +138,7 @@ class Addon(AddonModel):
    async def load(self) -> None:
        """Async initialize of object."""
        with suppress(DockerError):
            await self.instance.attach(tag=self.version)
            await self.instance.attach(version=self.version)

        # Evaluate state
        if await self.instance.is_running():
@@ -157,17 +194,19 @@ class Addon(AddonModel):
        return self.version != self.latest_version

    @property
    def dns(self) -> List[str]:
    def dns(self) -> list[str]:
        """Return list of DNS name for that add-on."""
        return [f"{self.hostname}.{DNS_SUFFIX}"]

    @property
    def options(self) -> Dict[str, Any]:
    def options(self) -> dict[str, Any]:
        """Return options with local changes."""
        return {**self.data[ATTR_OPTIONS], **self.persist[ATTR_OPTIONS]}
        return _OPTIONS_MERGER.merge(
            deepcopy(self.data[ATTR_OPTIONS]), deepcopy(self.persist[ATTR_OPTIONS])
        )

    @options.setter
    def options(self, value: Optional[Dict[str, Any]]) -> None:
    def options(self, value: Optional[dict[str, Any]]) -> None:
        """Store user add-on options."""
        self.persist[ATTR_OPTIONS] = {} if value is None else deepcopy(value)

@@ -244,12 +283,12 @@ class Addon(AddonModel):
        self.persist[ATTR_PROTECTED] = value

    @property
    def ports(self) -> Optional[Dict[str, Optional[int]]]:
    def ports(self) -> Optional[dict[str, Optional[int]]]:
        """Return ports of add-on."""
        return self.persist.get(ATTR_NETWORK, super().ports)

    @ports.setter
    def ports(self, value: Optional[Dict[str, Optional[int]]]) -> None:
    def ports(self, value: Optional[dict[str, Optional[int]]]) -> None:
        """Set custom ports of add-on."""
        if value is None:
            self.persist.pop(ATTR_NETWORK, None)
@@ -394,6 +433,24 @@ class Addon(AddonModel):
        """Return path to asound config for Docker."""
        return Path(self.sys_config.path_extern_tmp, f"{self.slug}_pulse")

    @property
    def devices(self) -> set[Device]:
        """Extract devices from add-on options."""
        options_schema = self.schema
        with suppress(vol.Invalid):
            options_schema.validate(self.options)

        return options_schema.devices

    @property
    def pwned(self) -> set[str]:
        """Extract pwned data for add-on options."""
        options_schema = self.schema
        with suppress(vol.Invalid):
            options_schema.validate(self.options)

        return options_schema.pwned

    def save_persist(self) -> None:
        """Save data of add-on."""
        self.sys_addons.data.save_data()
@@ -406,7 +463,7 @@ class Addon(AddonModel):
        application = RE_WATCHDOG.match(url)

        # extract arguments
        t_port = application.group("t_port")
        t_port = int(application.group("t_port"))
        t_proto = application.group("t_proto")
        s_prefix = application.group("s_prefix") or ""
        s_suffix = application.group("s_suffix") or ""
@@ -430,8 +487,8 @@ class Addon(AddonModel):
        # Make HTTP request
        try:
            url = f"{proto}://{self.ip_address}:{port}{s_suffix}"
            async with self.sys_websession_ssl.get(
                url, timeout=WATCHDOG_TIMEOUT
            async with self.sys_websession.get(
                url, timeout=WATCHDOG_TIMEOUT, ssl=False
            ) as req:
                if req.status < 300:
                    return True
@@ -442,22 +499,19 @@ class Addon(AddonModel):

    async def write_options(self) -> None:
        """Return True if add-on options is written to data."""
        schema = self.schema
        options = self.options

        # Update secrets for validation
        await self.sys_homeassistant.secrets.reload()

        try:
            options = schema(options)
            options = self.schema.validate(self.options)
            write_json_file(self.path_options, options)
        except vol.Invalid as ex:
            _LOGGER.error(
                "Add-on %s has invalid options: %s",
                self.slug,
                humanize_error(options, ex),
                humanize_error(self.options, ex),
            )
        except JsonFileError:
        except ConfigurationFileError:
            _LOGGER.error("Add-on %s can't write options", self.slug)
        else:
            _LOGGER.debug("Add-on %s write options: %s", self.slug, options)
@@ -485,8 +539,7 @@ class Addon(AddonModel):

        # Write pulse config
        try:
            with self.path_pulse.open("w") as config_file:
                config_file.write(pulse_config)
            self.path_pulse.write_text(pulse_config, encoding="utf-8")
        except OSError as err:
            _LOGGER.error(
                "Add-on %s can't write pulse/client.config: %s", self.slug, err
@@ -534,11 +587,15 @@ class Addon(AddonModel):
            return True

        # merge options
        options = {**self.persist[ATTR_OPTIONS], **default_options}
        options = _OPTIONS_MERGER.merge(
            deepcopy(default_options), deepcopy(self.persist[ATTR_OPTIONS])
        )

        # create voluptuous
        new_schema = vol.Schema(
            vol.All(dict, validate_options(self.coresys, new_raw_schema))
            vol.All(
                dict, AddonOptions(self.coresys, new_raw_schema, self.name, self.slug)
            )
        )

        # validate
@@ -570,7 +627,7 @@ class Addon(AddonModel):
        try:
            await self.instance.run()
        except DockerRequestError as err:
            self.state = AddonState.STOPPED
            self.state = AddonState.ERROR
            raise AddonsError() from err
        except DockerError as err:
            self.state = AddonState.ERROR
@@ -581,8 +638,9 @@ class Addon(AddonModel):
    async def stop(self) -> None:
        """Stop add-on."""
        try:
            return await self.instance.stop()
            await self.instance.stop()
        except DockerRequestError as err:
            self.state = AddonState.ERROR
            raise AddonsError() from err
        except DockerError as err:
            self.state = AddonState.ERROR
@@ -623,16 +681,34 @@ class Addon(AddonModel):
        Return a coroutine.
        """
        if not self.with_stdin:
            _LOGGER.error("Add-on %s does not support writing to stdin!", self.slug)
            raise AddonsNotSupportedError()
            raise AddonsNotSupportedError(
                f"Add-on {self.slug} does not support writing to stdin!", _LOGGER.error
            )

        try:
            return await self.instance.write_stdin(data)
        except DockerError as err:
            raise AddonsError() from err

    async def snapshot(self, tar_file: tarfile.TarFile) -> None:
        """Snapshot state of an add-on."""
    async def _backup_command(self, command: str) -> None:
        try:
            command_return = await self.instance.run_inside(command)
            if command_return.exit_code != 0:
                _LOGGER.error(
                    "Pre-/Post backup command returned error code: %s",
                    command_return.exit_code,
                )
                raise AddonsError()
        except DockerError as err:
            _LOGGER.error(
                "Failed running pre-/post backup command %s: %s", command, err
            )
            raise AddonsError() from err

    async def backup(self, tar_file: tarfile.TarFile) -> None:
        """Backup state of an add-on."""
        is_running = await self.is_running()

        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
            temp_path = Path(temp)

@@ -653,71 +729,95 @@ class Addon(AddonModel):
            # Store local configs/state
            try:
                write_json_file(temp_path.joinpath("addon.json"), data)
            except JsonFileError as err:
                _LOGGER.error("Can't save meta for %s", self.slug)
                raise AddonsError() from err
            except ConfigurationFileError as err:
                raise AddonsError(
                    f"Can't save meta for {self.slug}", _LOGGER.error
                ) from err

            # Store AppArmor Profile
            if self.sys_host.apparmor.exists(self.slug):
                profile = temp_path.joinpath("apparmor.txt")
                try:
                    self.sys_host.apparmor.backup_profile(self.slug, profile)
                    await self.sys_host.apparmor.backup_profile(self.slug, profile)
                except HostAppArmorError as err:
                    _LOGGER.error("Can't backup AppArmor profile")
                    raise AddonsError() from err
                    raise AddonsError(
                        "Can't backup AppArmor profile", _LOGGER.error
                    ) from err

            # write into tarfile
            def _write_tarfile():
                """Write tar inside loop."""
                with tar_file as snapshot:
                    # Snapshot system
                with tar_file as backup:
                    # Backup system

                    snapshot.add(temp, arcname=".")
                    backup.add(temp, arcname=".")

                    # Snapshot data
                    # Backup data
                    atomic_contents_add(
                        snapshot,
                        backup,
                        self.path_data,
                        excludes=self.snapshot_exclude,
                        excludes=self.backup_exclude,
                        arcname="data",
                    )

            if (
                is_running
                and self.backup_mode == AddonBackupMode.HOT
                and self.backup_pre is not None
            ):
                await self._backup_command(self.backup_pre)
            elif is_running and self.backup_mode == AddonBackupMode.COLD:
                _LOGGER.info("Shutdown add-on %s for cold backup", self.slug)
                await self.instance.stop()

            try:
                _LOGGER.info("Building snapshot for add-on %s", self.slug)
                _LOGGER.info("Building backup for add-on %s", self.slug)
                await self.sys_run_in_executor(_write_tarfile)
            except (tarfile.TarError, OSError) as err:
                _LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
                raise AddonsError() from err
                raise AddonsError(
                    f"Can't write tarfile {tar_file}: {err}", _LOGGER.error
                ) from err
            finally:
                if (
                    is_running
                    and self.backup_mode == AddonBackupMode.HOT
                    and self.backup_post is not None
                ):
                    await self._backup_command(self.backup_post)
                elif is_running and self.backup_mode is AddonBackupMode.COLD:
                    _LOGGER.info("Starting add-on %s again", self.slug)
                    await self.start()

        _LOGGER.info("Finish snapshot for addon %s", self.slug)
        _LOGGER.info("Finish backup for addon %s", self.slug)

    async def restore(self, tar_file: tarfile.TarFile) -> None:
        """Restore state of an add-on."""
        with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
            # extract snapshot
            # extract backup
            def _extract_tarfile():
                """Extract tar snapshot."""
                with tar_file as snapshot:
                    snapshot.extractall(path=Path(temp), members=secure_path(snapshot))
                """Extract tar backup."""
                with tar_file as backup:
                    backup.extractall(path=Path(temp), members=secure_path(backup))

            try:
                await self.sys_run_in_executor(_extract_tarfile)
            except tarfile.TarError as err:
                _LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
                raise AddonsError() from err
                raise AddonsError(
                    f"Can't read tarfile {tar_file}: {err}", _LOGGER.error
                ) from err

            # Read snapshot data
            # Read backup data
            try:
                data = read_json_file(Path(temp, "addon.json"))
            except JsonFileError as err:
            except ConfigurationFileError as err:
                raise AddonsError() from err

            # Validate
            try:
                data = SCHEMA_ADDON_SNAPSHOT(data)
                data = SCHEMA_ADDON_BACKUP(data)
            except vol.Invalid as err:
                _LOGGER.error(
                    "Can't validate %s, snapshot data: %s",
                    "Can't validate %s, backup data: %s",
                    self.slug,
                    humanize_error(data, err),
                )
@@ -725,8 +825,10 @@ class Addon(AddonModel):

            # If available
            if not self._available(data[ATTR_SYSTEM]):
                _LOGGER.error("Add-on %s is not available for this platform", self.slug)
                raise AddonsNotSupportedError()
                raise AddonsNotSupportedError(
                    f"Add-on {self.slug} is not available for this platform",
                    _LOGGER.error,
                )

            # Restore local add-on information
            _LOGGER.info("Restore config for addon %s", self.slug)
@@ -759,7 +861,11 @@ class Addon(AddonModel):
            # Restore data
            def _restore_data():
                """Restore data."""
                shutil.copytree(Path(temp, "data"), self.path_data, symlinks=True)
                temp_data = Path(temp, "data")
                if temp_data.is_dir():
                    shutil.copytree(temp_data, self.path_data, symlinks=True)
                else:
                    self.path_data.mkdir()

            _LOGGER.info("Restoring data for addon %s", self.slug)
            if self.path_data.is_dir():
@@ -767,8 +873,9 @@ class Addon(AddonModel):
            try:
                await self.sys_run_in_executor(_restore_data)
            except shutil.Error as err:
                _LOGGER.error("Can't restore origin data: %s", err)
                raise AddonsError() from err
                raise AddonsError(
                    f"Can't restore origin data: {err}", _LOGGER.error
                ) from err

            # Restore AppArmor
            profile_file = Path(temp, "apparmor.txt")
@@ -2,18 +2,28 @@
from __future__ import annotations

from pathlib import Path
from typing import TYPE_CHECKING, Dict
from typing import TYPE_CHECKING

from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON
from awesomeversion import AwesomeVersion

from ..const import (
    ATTR_ARGS,
    ATTR_BUILD_FROM,
    ATTR_LABELS,
    ATTR_SQUASH,
    FILE_SUFFIX_CONFIGURATION,
    META_ADDON,
)
from ..coresys import CoreSys, CoreSysAttributes
from ..utils.json import JsonConfig
from ..exceptions import ConfigurationFileError
from ..utils.common import FileConfiguration, find_one_filetype
from .validate import SCHEMA_BUILD_CONFIG

if TYPE_CHECKING:
    from . import AnyAddon


class AddonBuild(JsonConfig, CoreSysAttributes):
class AddonBuild(FileConfiguration, CoreSysAttributes):
    """Handle build options for add-ons."""

    def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
@@ -21,9 +31,14 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
        self.coresys: CoreSys = coresys
        self.addon = addon

        super().__init__(
            Path(self.addon.path_location, "build.json"), SCHEMA_BUILD_CONFIG
        )
        try:
            build_file = find_one_filetype(
                self.addon.path_location, "build", FILE_SUFFIX_CONFIGURATION
            )
        except ConfigurationFileError:
            build_file = self.addon.path_location / "build.json"

        super().__init__(build_file, SCHEMA_BUILD_CONFIG)

    def save_data(self):
        """Ignore save function."""
@@ -32,9 +47,12 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
    @property
    def base_image(self) -> str:
        """Return base image for this add-on."""
        return self._data[ATTR_BUILD_FROM].get(
            self.sys_arch.default, f"homeassistant/{self.sys_arch.default}-base:latest"
        )
        if not self._data[ATTR_BUILD_FROM]:
            return f"ghcr.io/home-assistant/{self.sys_arch.default}-base:latest"

        # Evaluate correct base image
        arch = self.sys_arch.match(list(self._data[ATTR_BUILD_FROM].keys()))
        return self._data[ATTR_BUILD_FROM][arch]

    @property
    def squash(self) -> bool:
@@ -42,17 +60,32 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
        return self._data[ATTR_SQUASH]

    @property
    def additional_args(self) -> Dict[str, str]:
    def additional_args(self) -> dict[str, str]:
        """Return additional Docker build arguments."""
        return self._data[ATTR_ARGS]

    def get_docker_args(self, version):
    @property
    def additional_labels(self) -> dict[str, str]:
        """Return additional Docker labels."""
        return self._data[ATTR_LABELS]

    @property
    def is_valid(self) -> bool:
        """Return true if the build env is valid."""
        return all(
            [
                self.addon.path_location.is_dir(),
                Path(self.addon.path_location, "Dockerfile").is_file(),
            ]
        )

    def get_docker_args(self, version: AwesomeVersion):
        """Create a dict with Docker build arguments."""
        args = {
            "path": str(self.addon.path_location),
            "tag": f"{self.addon.image}:{version}",
            "tag": f"{self.addon.image}:{version!s}",
            "pull": True,
            "forcerm": True,
            "forcerm": not self.sys_dev,
            "squash": self.squash,
            "labels": {
                "io.hass.version": version,
@@ -60,6 +93,7 @@ class AddonBuild(JsonConfig, CoreSysAttributes):
                "io.hass.type": META_ADDON,
                "io.hass.name": self._fix_label("name"),
                "io.hass.description": self._fix_label("description"),
                **self.additional_labels,
            },
            "buildargs": {
                "BUILD_FROM": self.base_image,
12
supervisor/addons/const.py
Normal file
@@ -0,0 +1,12 @@
"""Add-on static data."""
from enum import Enum


class AddonBackupMode(str, Enum):
    """Backup mode of an Add-on."""

    HOT = "hot"
    COLD = "cold"


ATTR_BACKUP = "backup"
@@ -1,7 +1,6 @@
"""Init file for Supervisor add-on data."""
from copy import deepcopy
import logging
from typing import Any, Dict
from typing import Any

from ..const import (
    ATTR_IMAGE,
@@ -13,16 +12,14 @@ from ..const import (
)
from ..coresys import CoreSys, CoreSysAttributes
from ..store.addon import AddonStore
from ..utils.json import JsonConfig
from ..utils.common import FileConfiguration
from .addon import Addon
from .validate import SCHEMA_ADDONS_FILE

_LOGGER: logging.Logger = logging.getLogger(__name__)

Config = Dict[str, Any]
Config = dict[str, Any]


class AddonsData(JsonConfig, CoreSysAttributes):
class AddonsData(FileConfiguration, CoreSysAttributes):
    """Hold data for installed Add-ons inside Supervisor."""

    def __init__(self, coresys: CoreSys):
@@ -1,10 +1,11 @@
"""Init file for Supervisor add-ons."""
from abc import ABC, abstractmethod
from pathlib import Path
from typing import Any, Awaitable, Dict, List, Optional
from typing import Any, Awaitable, Optional

from packaging import version as pkg_version
import voluptuous as vol
from awesomeversion import AwesomeVersion, AwesomeVersionException

from supervisor.addons.const import AddonBackupMode

from ..const import (
    ATTR_ADVANCED,
@@ -12,7 +13,9 @@ from ..const import (
    ATTR_ARCH,
    ATTR_AUDIO,
    ATTR_AUTH_API,
    ATTR_AUTO_UART,
    ATTR_BACKUP_EXCLUDE,
    ATTR_BACKUP_POST,
    ATTR_BACKUP_PRE,
    ATTR_BOOT,
    ATTR_DESCRIPTON,
    ATTR_DEVICES,
@@ -32,7 +35,9 @@ from ..const import (
    ATTR_HOST_PID,
    ATTR_IMAGE,
    ATTR_INGRESS,
    ATTR_INGRESS_STREAM,
    ATTR_INIT,
    ATTR_JOURNALD,
    ATTR_KERNEL_MODULES,
    ATTR_LEGACY,
    ATTR_LOCATON,
@@ -46,16 +51,18 @@ from ..const import (
    ATTR_PORTS,
    ATTR_PORTS_DESCRIPTION,
    ATTR_PRIVILEGED,
    ATTR_REALTIME,
    ATTR_REPOSITORY,
    ATTR_SCHEMA,
    ATTR_SERVICES,
    ATTR_SLUG,
    ATTR_SNAPSHOT_EXCLUDE,
    ATTR_STAGE,
    ATTR_STARTUP,
    ATTR_STDIN,
    ATTR_TIMEOUT,
    ATTR_TMPFS,
    ATTR_TRANSLATIONS,
    ATTR_UART,
    ATTR_UDEV,
    ATTR_URL,
    ATTR_USB,
@@ -71,9 +78,12 @@ from ..const import (
    AddonStartup,
)
from ..coresys import CoreSys, CoreSysAttributes
from .validate import RE_SERVICE, RE_VOLUME, schema_ui_options, validate_options
from ..docker.const import Capabilities
from .const import ATTR_BACKUP
from .options import AddonOptions, UiOptions
from .validate import RE_SERVICE, RE_VOLUME

Data = Dict[str, Any]
Data = dict[str, Any]


class AddonModel(CoreSysAttributes, ABC):
@@ -105,7 +115,7 @@ class AddonModel(CoreSysAttributes, ABC):
        return self._available(self.data)

    @property
    def options(self) -> Dict[str, Any]:
    def options(self) -> dict[str, Any]:
        """Return options with local changes."""
        return self.data[ATTR_OPTIONS]

@@ -130,7 +140,7 @@ class AddonModel(CoreSysAttributes, ABC):
        return self.slug.replace("_", "-")

    @property
    def dns(self) -> List[str]:
    def dns(self) -> list[str]:
        """Return list of DNS name for that add-on."""
        return []

@@ -174,8 +184,7 @@ class AddonModel(CoreSysAttributes, ABC):
            return None

        # Return data
        with readme.open("r") as readme_file:
            return readme_file.read()
        return readme.read_text(encoding="utf-8")

    @property
    def repository(self) -> str:
@@ -183,12 +192,17 @@ class AddonModel(CoreSysAttributes, ABC):
        return self.data[ATTR_REPOSITORY]

    @property
    def latest_version(self) -> str:
    def translations(self) -> dict:
        """Return add-on translations."""
        return self.data[ATTR_TRANSLATIONS]

    @property
    def latest_version(self) -> AwesomeVersion:
        """Return latest version of add-on."""
        return self.data[ATTR_VERSION]

    @property
    def version(self) -> Optional[str]:
    def version(self) -> AwesomeVersion:
        """Return version of add-on."""
        return self.data[ATTR_VERSION]

@@ -213,7 +227,7 @@ class AddonModel(CoreSysAttributes, ABC):
        return self.data[ATTR_STAGE]

    @property
    def services_role(self) -> Dict[str, str]:
    def services_role(self) -> dict[str, str]:
        """Return dict of services with rights."""
        services_list = self.data.get(ATTR_SERVICES, [])

@@ -226,17 +240,17 @@ class AddonModel(CoreSysAttributes, ABC):
        return services

    @property
    def discovery(self) -> List[str]:
    def discovery(self) -> list[str]:
        """Return list of discoverable components/platforms."""
        return self.data.get(ATTR_DISCOVERY, [])

    @property
    def ports_description(self) -> Optional[Dict[str, str]]:
    def ports_description(self) -> Optional[dict[str, str]]:
        """Return descriptions of ports."""
        return self.data.get(ATTR_PORTS_DESCRIPTION)

    @property
    def ports(self) -> Optional[Dict[str, Optional[int]]]:
    def ports(self) -> Optional[dict[str, Optional[int]]]:
        """Return ports of add-on."""
        return self.data.get(ATTR_PORTS)

@@ -296,22 +310,17 @@ class AddonModel(CoreSysAttributes, ABC):
        return self.data[ATTR_HOST_DBUS]

    @property
    def devices(self) -> List[str]:
        """Return devices of add-on."""
        return self.data.get(ATTR_DEVICES, [])
    def static_devices(self) -> list[Path]:
        """Return static devices of add-on."""
        return [Path(node) for node in self.data.get(ATTR_DEVICES, [])]

    @property
    def tmpfs(self) -> Optional[str]:
        """Return tmpfs of add-on."""
        return self.data.get(ATTR_TMPFS)

    @property
    def environment(self) -> Optional[Dict[str, str]]:
    def environment(self) -> Optional[dict[str, str]]:
        """Return environment of add-on."""
        return self.data.get(ATTR_ENVIRONMENT)

    @property
    def privileged(self) -> List[str]:
    def privileged(self) -> list[Capabilities]:
        """Return list of privilege."""
        return self.data.get(ATTR_PRIVILEGED, [])

@@ -350,9 +359,24 @@ class AddonModel(CoreSysAttributes, ABC):
        return self.data[ATTR_HASSIO_ROLE]

    @property
    def snapshot_exclude(self) -> List[str]:
        """Return Exclude list for snapshot."""
        return self.data.get(ATTR_SNAPSHOT_EXCLUDE, [])
    def backup_exclude(self) -> list[str]:
        """Return Exclude list for backup."""
        return self.data.get(ATTR_BACKUP_EXCLUDE, [])

    @property
    def backup_pre(self) -> Optional[str]:
        """Return pre-backup command."""
        return self.data.get(ATTR_BACKUP_PRE)

    @property
    def backup_post(self) -> Optional[str]:
        """Return post-backup command."""
        return self.data.get(ATTR_BACKUP_POST)

    @property
    def backup_mode(self) -> AddonBackupMode:
        """Return if backup is hot/cold."""
        return self.data[ATTR_BACKUP]

    @property
    def default_init(self) -> bool:
@@ -374,6 +398,11 @@ class AddonModel(CoreSysAttributes, ABC):
        """Return True if the add-on access support ingress."""
        return None

    @property
    def ingress_stream(self) -> bool:
        """Return True if post requests to ingress should be streamed."""
        return self.data[ATTR_INGRESS_STREAM]

    @property
    def with_gpio(self) -> bool:
        """Return True if the add-on access to GPIO interface."""
@@ -387,7 +416,7 @@ class AddonModel(CoreSysAttributes, ABC):
    @property
    def with_uart(self) -> bool:
        """Return True if we should map all UART device."""
        return self.data[ATTR_AUTO_UART]
        return self.data[ATTR_UART]

    @property
    def with_udev(self) -> bool:
@@ -399,6 +428,11 @@ class AddonModel(CoreSysAttributes, ABC):
        """Return True if the add-on access to kernel modules."""
        return self.data[ATTR_KERNEL_MODULES]

    @property
    def with_realtime(self) -> bool:
        """Return True if the add-on need realtime schedule functions."""
        return self.data[ATTR_REALTIME]

    @property
    def with_full_access(self) -> bool:
        """Return True if the add-on want full access to hardware."""
@@ -409,6 +443,11 @@ class AddonModel(CoreSysAttributes, ABC):
        """Return True if the add-on read access to devicetree."""
        return self.data[ATTR_DEVICETREE]

    @property
    def with_tmpfs(self) -> Optional[str]:
        """Return if tmp is in memory of add-on."""
        return self.data[ATTR_TMPFS]

    @property
    def access_auth_api(self) -> bool:
        """Return True if the add-on access to login/auth backend."""
@@ -455,12 +494,12 @@ class AddonModel(CoreSysAttributes, ABC):
        return self.path_documentation.exists()

    @property
    def supported_arch(self) -> List[str]:
    def supported_arch(self) -> list[str]:
        """Return list of supported arch."""
        return self.data[ATTR_ARCH]

    @property
    def supported_machine(self) -> List[str]:
    def supported_machine(self) -> list[str]:
        """Return list of supported machine."""
        return self.data.get(ATTR_MACHINE, [])

@@ -475,7 +514,7 @@ class AddonModel(CoreSysAttributes, ABC):
        return ATTR_IMAGE not in self.data

    @property
    def map_volumes(self) -> Dict[str, str]:
    def map_volumes(self) -> dict[str, str]:
        """Return a dict of {volume: policy} from add-on."""
        volumes = {}
        for volume in self.data[ATTR_MAP]:
@@ -517,22 +556,27 @@ class AddonModel(CoreSysAttributes, ABC):
        return Path(self.path_location, "apparmor.txt")

    @property
    def schema(self) -> vol.Schema:
        """Create a schema for add-on options."""
    def schema(self) -> AddonOptions:
        """Return Addon options validation object."""
        raw_schema = self.data[ATTR_SCHEMA]

        if isinstance(raw_schema, bool):
            return vol.Schema(dict)
        return vol.Schema(vol.All(dict, validate_options(self.coresys, raw_schema)))
            raw_schema = {}

        return AddonOptions(self.coresys, raw_schema, self.name, self.slug)
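The schema property now returns an AddonOptions helper rather than a bare vol.Schema, so callers validate through it explicitly and can read back side data collected during validation. A hedged usage sketch (addon stands in for any AddonModel instance; note each schema access builds a fresh AddonOptions, so keep a reference):

    schema = addon.schema                     # AddonOptions instance
    options = schema.validate(addon.options)  # raises vol.Invalid on bad options
    used_devices = schema.devices             # devices collected by device(...) options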

    @property
    def schema_ui(self) -> Optional[List[Dict[str, Any]]]:
    def schema_ui(self) -> Optional[list[dict[any, any]]]:
        """Create a UI schema for add-on options."""
        raw_schema = self.data[ATTR_SCHEMA]

        if isinstance(raw_schema, bool):
            return None
        return schema_ui_options(raw_schema)
        return UiOptions(self.coresys)(raw_schema)

    @property
    def with_journald(self) -> bool:
        """Return True if the add-on accesses the system journal."""
        return self.data[ATTR_JOURNALD]

    def __eq__(self, other):
        """Compaired add-on objects."""
@@ -554,15 +598,10 @@ class AddonModel(CoreSysAttributes, ABC):
            return False

        # Home Assistant
        version = config.get(ATTR_HOMEASSISTANT)
        if version is None or self.sys_homeassistant.version is None:
            return True

        version: Optional[AwesomeVersion] = config.get(ATTR_HOMEASSISTANT)
        try:
            return pkg_version.parse(
                self.sys_homeassistant.version
            ) >= pkg_version.parse(version)
        except pkg_version.InvalidVersion:
            return self.sys_homeassistant.version >= version
        except (AwesomeVersionException, TypeError):
            return True

    def _image(self, config) -> str:
@@ -583,9 +622,9 @@ class AddonModel(CoreSysAttributes, ABC):
        """Uninstall this add-on."""
        return self.sys_addons.uninstall(self.slug)

    def update(self) -> Awaitable[None]:
    def update(self, backup: Optional[bool] = False) -> Awaitable[None]:
        """Update this add-on."""
        return self.sys_addons.update(self.slug)
        return self.sys_addons.update(self.slug, backup=backup)

    def rebuild(self) -> Awaitable[None]:
        """Rebuild this add-on."""
422
supervisor/addons/options.py
Normal file
@@ -0,0 +1,422 @@
"""Add-on Options / UI rendering."""
import hashlib
import logging
from pathlib import Path
import re
from typing import Any, Union

import voluptuous as vol

from ..coresys import CoreSys, CoreSysAttributes
from ..exceptions import HardwareNotFound
from ..hardware.const import UdevSubsystem
from ..hardware.data import Device
from ..validate import network_port

_LOGGER: logging.Logger = logging.getLogger(__name__)

_STR = "str"
_INT = "int"
_FLOAT = "float"
_BOOL = "bool"
_PASSWORD = "password"
_EMAIL = "email"
_URL = "url"
_PORT = "port"
_MATCH = "match"
_LIST = "list"
_DEVICE = "device"

RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|bool"
    r"|email"
    r"|url"
    r"|port"
    r"|device(?:\((?P<filter>subsystem=[a-z]+)\))?"
    r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
    r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r"|list\((?P<list>.+)\)"
    r")\??$"
)
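RE_SCHEMA_ELEMENT recognizes the compact type strings used in an add-on's option schema, including optional range arguments and a trailing ? for optional fields. A few checks against the pattern defined above (the sample type strings are illustrative):

    assert RE_SCHEMA_ELEMENT.match("int(1,65535)").group("i_min") == "1"
    assert RE_SCHEMA_ELEMENT.match("list(debug|info|warning)").group("list") == "debug|info|warning"
    assert RE_SCHEMA_ELEMENT.match("str?")  # trailing ? marks the option as optional
    assert RE_SCHEMA_ELEMENT.match("device(subsystem=tty)").group("filter") == "subsystem=tty"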
|
||||
|
||||
_SCHEMA_LENGTH_PARTS = (
|
||||
"i_min",
|
||||
"i_max",
|
||||
"f_min",
|
||||
"f_max",
|
||||
"s_min",
|
||||
"s_max",
|
||||
"p_min",
|
||||
"p_max",
|
||||
)
|
||||
|
||||
|
||||
class AddonOptions(CoreSysAttributes):
|
||||
"""Validate Add-ons Options."""
|
||||
|
||||
def __init__(
|
||||
self, coresys: CoreSys, raw_schema: dict[str, Any], name: str, slug: str
|
||||
):
|
||||
"""Validate schema."""
|
||||
self.coresys: CoreSys = coresys
|
||||
self.raw_schema: dict[str, Any] = raw_schema
|
||||
self.devices: set[Device] = set()
|
||||
self.pwned: set[str] = set()
|
||||
self._name = name
|
||||
self._slug = slug
|
||||
|
||||
@property
|
||||
def validate(self) -> vol.Schema:
|
||||
"""Create a schema for add-on options."""
|
||||
return vol.Schema(vol.All(dict, self))
|
||||
|
||||
def __call__(self, struct):
|
||||
"""Create schema validator for add-ons options."""
|
||||
options = {}
|
||||
|
||||
# read options
|
||||
for key, value in struct.items():
|
||||
# Ignore unknown options / remove from list
|
||||
if key not in self.raw_schema:
|
||||
_LOGGER.warning(
|
||||
"Option '%s' does not exist in the schema for %s (%s)",
|
||||
key,
|
||||
self._name,
|
||||
self._slug,
|
||||
)
|
||||
continue
|
||||
|
||||
typ = self.raw_schema[key]
|
||||
try:
|
||||
if isinstance(typ, list):
|
||||
# nested value list
|
||||
options[key] = self._nested_validate_list(typ[0], value, key)
|
||||
elif isinstance(typ, dict):
|
||||
# nested value dict
|
||||
options[key] = self._nested_validate_dict(typ, value, key)
|
||||
else:
|
||||
# normal value
|
||||
options[key] = self._single_validate(typ, value, key)
|
||||
except (IndexError, KeyError):
|
||||
raise vol.Invalid(
|
||||
f"Type error for option '{key}' in {self._name} ({self._slug})"
|
||||
) from None
|
||||
|
||||
self._check_missing_options(self.raw_schema, options, "root")
|
||||
return options
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
def _single_validate(self, typ: str, value: Any, key: str):
|
||||
"""Validate a single element."""
|
||||
# if required argument
|
||||
if value is None:
|
||||
raise vol.Invalid(
|
||||
f"Missing required option '{key}' in {self._name} ({self._slug})"
|
||||
) from None
|
||||
|
||||
# Lookup secret
|
||||
if str(value).startswith("!secret "):
|
||||
secret: str = value.partition(" ")[2]
|
||||
value = self.sys_homeassistant.secrets.get(secret)
|
||||
if value is None:
|
||||
raise vol.Invalid(
|
||||
f"Unknown secret '{secret}' in {self._name} ({self._slug})"
|
||||
) from None
|
||||
|
||||
# parse extend data from type
|
||||
match = RE_SCHEMA_ELEMENT.match(typ)
|
||||
|
||||
if not match:
|
||||
raise vol.Invalid(
|
||||
f"Unknown type '{typ}' in {self._name} ({self._slug})"
|
||||
) from None
|
||||
|
||||
# prepare range
|
||||
range_args = {}
|
||||
for group_name in _SCHEMA_LENGTH_PARTS:
|
||||
group_value = match.group(group_name)
|
||||
if group_value:
|
||||
range_args[group_name[2:]] = float(group_value)
|
||||
|
||||
if typ.startswith(_STR) or typ.startswith(_PASSWORD):
|
||||
if typ.startswith(_PASSWORD) and value:
|
||||
self.pwned.add(hashlib.sha1(str(value).encode()).hexdigest())
|
||||
return vol.All(str(value), vol.Range(**range_args))(value)
|
||||
elif typ.startswith(_INT):
|
||||
return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
|
||||
elif typ.startswith(_FLOAT):
|
||||
return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
|
||||
elif typ.startswith(_BOOL):
|
||||
return vol.Boolean()(value)
|
||||
elif typ.startswith(_EMAIL):
|
||||
return vol.Email()(value)
|
||||
elif typ.startswith(_URL):
|
||||
return vol.Url()(value)
|
||||
elif typ.startswith(_PORT):
|
||||
return network_port(value)
|
||||
elif typ.startswith(_MATCH):
|
||||
return vol.Match(match.group("match"))(str(value))
|
||||
elif typ.startswith(_LIST):
|
||||
return vol.In(match.group("list").split("|"))(str(value))
|
||||
elif typ.startswith(_DEVICE):
|
||||
try:
|
||||
device = self.sys_hardware.get_by_path(Path(value))
|
||||
except HardwareNotFound:
|
||||
raise vol.Invalid(
|
||||
f"Device '{value}' does not exist in {self._name} ({self._slug})"
|
||||
) from None
|
||||
|
||||
# Have filter
|
||||
if match.group("filter"):
|
||||
str_filter = match.group("filter")
|
||||
device_filter = _create_device_filter(str_filter)
|
||||
if device not in self.sys_hardware.filter_devices(**device_filter):
|
||||
raise vol.Invalid(
|
||||
f"Device '{value}' don't match the filter {str_filter}! in {self._name} ({self._slug})"
|
||||
)
|
||||
|
||||
# Device valid
|
||||
self.devices.add(device)
|
||||
return str(device.path)
|
||||
|
||||
raise vol.Invalid(
|
||||
f"Fatal error for option '{key}' with type '{typ}' in {self._name} ({self._slug})"
|
||||
) from None
|
||||
|
||||
def _nested_validate_list(self, typ: Any, data_list: list[Any], key: str):
|
||||
"""Validate nested items."""
|
||||
options = []
|
||||
|
||||
# Make sure it is a list
|
||||
if not isinstance(data_list, list):
|
||||
raise vol.Invalid(
|
||||
f"Invalid list for option '{key}' in {self._name} ({self._slug})"
|
||||
) from None
|
||||
|
||||
# Process list
|
||||
for element in data_list:
|
||||
# Nested?
|
||||
if isinstance(typ, dict):
|
||||
c_options = self._nested_validate_dict(typ, element, key)
|
||||
options.append(c_options)
|
||||
else:
|
||||
options.append(self._single_validate(typ, element, key))
|
||||
|
||||
return options
|
||||
|
||||
def _nested_validate_dict(
|
||||
self, typ: dict[Any, Any], data_dict: dict[Any, Any], key: str
|
||||
):
|
||||
"""Validate nested items."""
|
||||
options = {}
|
||||
|
||||
# Make sure it is a dict
|
||||
if not isinstance(data_dict, dict):
|
||||
raise vol.Invalid(
|
||||
f"Invalid dict for option '{key}' in {self._name} ({self._slug})"
|
||||
) from None
|
||||
|
||||
# Process dict
|
||||
for c_key, c_value in data_dict.items():
|
||||
# Ignore unknown options / remove from list
|
||||
if c_key not in typ:
|
||||
_LOGGER.warning(
|
||||
"Unknown option '%s' for %s (%s)", c_key, self._name, self._slug
|
||||
)
|
||||
continue
|
||||
|
||||
# Nested?
|
||||
if isinstance(typ[c_key], list):
|
||||
options[c_key] = self._nested_validate_list(
|
||||
typ[c_key][0], c_value, c_key
|
||||
)
|
||||
else:
|
||||
options[c_key] = self._single_validate(typ[c_key], c_value, c_key)
|
||||
|
||||
self._check_missing_options(typ, options, key)
|
||||
return options

    def _check_missing_options(
        self, origin: dict[Any, Any], exists: dict[Any, Any], root: str
    ) -> None:
        """Check that all required options exist."""
        missing = set(origin) - set(exists)
        for miss_opt in missing:
            miss_schema = origin[miss_opt]

            # If it's a list, the value inside decides whether it's optional, e.g. ["str?"]
            if isinstance(miss_schema, list) and len(miss_schema) > 0:
                miss_schema = miss_schema[0]

            if isinstance(miss_schema, str) and miss_schema.endswith("?"):
                continue

            raise vol.Invalid(
                f"Missing option '{miss_opt}' in {root} in {self._name} ({self._slug})"
            ) from None
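
An illustrative (invented) schema for the trailing-"?" convention checked above:

    schema = {"ssl": "bool?", "hosts": ["str?"], "port": "port"}
    # "ssl" and "hosts" may be omitted: the "?" suffix marks them optional,
    # and for a list the marker on the inner type string is what counts.
    # Omitting "port" raises vol.Invalid("Missing option 'port' in root ...").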


class UiOptions(CoreSysAttributes):
    """Render UI Add-ons Options."""

    def __init__(self, coresys: CoreSys) -> None:
        """Initialize UI option render."""
        self.coresys = coresys

    def __call__(self, raw_schema: dict[str, Any]) -> list[dict[str, Any]]:
        """Generate UI schema."""
        ui_schema: list[dict[str, Any]] = []

        # read options
        for key, value in raw_schema.items():
            if isinstance(value, list):
                # nested value list
                self._nested_ui_list(ui_schema, value, key)
            elif isinstance(value, dict):
                # nested value dict
                self._nested_ui_dict(ui_schema, value, key)
            else:
                # normal value
                self._single_ui_option(ui_schema, value, key)

        return ui_schema
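
A rough sketch of what the renderer produces (shape inferred from _single_ui_option below; exact fields may differ in practice):

    raw_schema = {"password": "password", "port": "int(1,65535)?"}
    # UiOptions(coresys)(raw_schema) would yield approximately:
    # [
    #     {"name": "password", "required": True,
    #      "type": "string", "format": "password"},
    #     {"name": "port", "optional": True, "type": "integer",
    #      "lengthMin": 1.0, "lengthMax": 65535.0},
    # ]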

    def _single_ui_option(
        self,
        ui_schema: list[dict[str, Any]],
        value: str,
        key: str,
        multiple: bool = False,
    ) -> None:
        """Validate a single element."""
        ui_node: dict[str, Union[str, bool, float, list[str]]] = {"name": key}

        # If multiple
        if multiple:
            ui_node["multiple"] = True

        # Parse extend data from type
        match = RE_SCHEMA_ELEMENT.match(value)
        if not match:
            return

        # Prepare range
        for group_name in _SCHEMA_LENGTH_PARTS:
            group_value = match.group(group_name)
            if not group_value:
                continue
            if group_name[2:] == "min":
                ui_node["lengthMin"] = float(group_value)
            elif group_name[2:] == "max":
                ui_node["lengthMax"] = float(group_value)

        # Required or optional
        if value.endswith("?"):
            ui_node["optional"] = True
        else:
            ui_node["required"] = True

        # Data types
        if value.startswith(_STR):
            ui_node["type"] = "string"
        elif value.startswith(_PASSWORD):
            ui_node["type"] = "string"
            ui_node["format"] = "password"
        elif value.startswith(_INT):
            ui_node["type"] = "integer"
        elif value.startswith(_FLOAT):
            ui_node["type"] = "float"
        elif value.startswith(_BOOL):
            ui_node["type"] = "boolean"
        elif value.startswith(_EMAIL):
            ui_node["type"] = "string"
            ui_node["format"] = "email"
        elif value.startswith(_URL):
            ui_node["type"] = "string"
            ui_node["format"] = "url"
        elif value.startswith(_PORT):
            ui_node["type"] = "integer"
        elif value.startswith(_MATCH):
            ui_node["type"] = "string"
        elif value.startswith(_LIST):
            ui_node["type"] = "select"
            ui_node["options"] = match.group("list").split("|")
        elif value.startswith(_DEVICE):
            ui_node["type"] = "select"

            # Have filter
            if match.group("filter"):
                device_filter = _create_device_filter(match.group("filter"))
                ui_node["options"] = [
                    (device.by_id or device.path).as_posix()
                    for device in self.sys_hardware.filter_devices(**device_filter)
                ]
            else:
                ui_node["options"] = [
                    (device.by_id or device.path).as_posix()
                    for device in self.sys_hardware.devices
                ]

        ui_schema.append(ui_node)

    def _nested_ui_list(
        self,
        ui_schema: list[dict[str, Any]],
        option_list: list[Any],
        key: str,
    ) -> None:
        """UI nested list items."""
        try:
            element = option_list[0]
        except IndexError:
            _LOGGER.error("Invalid schema %s", key)
            return

        if isinstance(element, dict):
            self._nested_ui_dict(ui_schema, element, key, multiple=True)
        else:
            self._single_ui_option(ui_schema, element, key, multiple=True)

    def _nested_ui_dict(
        self,
        ui_schema: list[dict[str, Any]],
        option_dict: dict[str, Any],
        key: str,
        multiple: bool = False,
    ) -> None:
        """UI nested dict items."""
        ui_node = {
            "name": key,
            "type": "schema",
            "optional": True,
            "multiple": multiple,
        }

        nested_schema = []
        for c_key, c_value in option_dict.items():
            # Nested?
            if isinstance(c_value, list):
                self._nested_ui_list(nested_schema, c_value, c_key)
            else:
                self._single_ui_option(nested_schema, c_value, c_key)

        ui_node["schema"] = nested_schema
        ui_schema.append(ui_node)


def _create_device_filter(str_filter: str) -> dict[str, Any]:
    """Generate device filter."""
    raw_filter = dict(value.split("=") for value in str_filter.split(";"))

    clean_filter = {}
    for key, value in raw_filter.items():
        if key == "subsystem":
            clean_filter[key] = UdevSubsystem(value)
        else:
            clean_filter[key] = value

    return clean_filter
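
A hedged example of the filter syntax parsed above, "key=value" pairs joined with ";" (the UdevSubsystem member names are assumed for illustration):

    # _create_device_filter("subsystem=tty")
    #   -> {"subsystem": UdevSubsystem.TTY}
    # _create_device_filter("subsystem=sound;id_bus=usb")  # hypothetical keys
    #   -> {"subsystem": UdevSubsystem.SOUND, "id_bus": "usb"}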
@@ -6,18 +6,8 @@ import logging
from pathlib import Path
from typing import TYPE_CHECKING

from ..const import (
    PRIVILEGED_DAC_READ_SEARCH,
    PRIVILEGED_NET_ADMIN,
    PRIVILEGED_SYS_ADMIN,
    PRIVILEGED_SYS_MODULE,
    PRIVILEGED_SYS_PTRACE,
    PRIVILEGED_SYS_RAWIO,
    ROLE_ADMIN,
    ROLE_MANAGER,
    SECURITY_DISABLE,
    SECURITY_PROFILE,
)
from ..const import ROLE_ADMIN, ROLE_MANAGER, SECURITY_DISABLE, SECURITY_PROFILE
from ..docker.const import Capabilities

if TYPE_CHECKING:
    from .model import AddonModel

@@ -46,16 +36,19 @@ def rating_security(addon: AddonModel) -> int:
        rating += 1

    # Privileged options
    if any(
        privilege in addon.privileged
        for privilege in (
            PRIVILEGED_NET_ADMIN,
            PRIVILEGED_SYS_ADMIN,
            PRIVILEGED_SYS_RAWIO,
            PRIVILEGED_SYS_PTRACE,
            PRIVILEGED_SYS_MODULE,
            PRIVILEGED_DAC_READ_SEARCH,
    if (
        any(
            privilege in addon.privileged
            for privilege in (
                Capabilities.NET_ADMIN,
                Capabilities.SYS_ADMIN,
                Capabilities.SYS_RAWIO,
                Capabilities.SYS_PTRACE,
                Capabilities.SYS_MODULE,
                Capabilities.DAC_READ_SEARCH,
            )
        )
        or addon.with_kernel_modules
    ):
        rating += -1

@@ -73,12 +66,8 @@ def rating_security(addon: AddonModel) -> int:
    if addon.host_pid:
        rating += -2

    # Full Access
    if addon.with_full_access:
        rating += -2

    # Docker Access
    if addon.access_docker_api:
    # Docker Access & full Access
    if addon.access_docker_api or addon.with_full_access:
        rating = 1

    return max(min(6, rating), 1)
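
A worked example of the clamp in the return above (flag values invented for illustration):

    rating = 5                     # hypothetical running total from earlier checks
    rating += -1                   # requests Capabilities.SYS_ADMIN
    rating += -2                   # host_pid enabled
    print(max(min(6, rating), 1))  # -> 2; Docker API or full access would force 1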

@@ -2,11 +2,13 @@
import logging
import re
import secrets
from typing import Any, Dict, List, Union
from typing import Any
import uuid

import voluptuous as vol

from supervisor.addons.const import AddonBackupMode

from ..const import (
    ARCH_ALL,
    ATTR_ACCESS_TOKEN,
@@ -18,10 +20,13 @@ from ..const import (
    ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT,
    ATTR_AUTH_API,
    ATTR_AUTO_UART,
    ATTR_AUTO_UPDATE,
    ATTR_BACKUP_EXCLUDE,
    ATTR_BACKUP_POST,
    ATTR_BACKUP_PRE,
    ATTR_BOOT,
    ATTR_BUILD_FROM,
    ATTR_CONFIGURATION,
    ATTR_DESCRIPTON,
    ATTR_DEVICES,
    ATTR_DEVICETREE,
@@ -43,9 +48,12 @@ from ..const import (
    ATTR_INGRESS_ENTRY,
    ATTR_INGRESS_PANEL,
    ATTR_INGRESS_PORT,
    ATTR_INGRESS_STREAM,
    ATTR_INGRESS_TOKEN,
    ATTR_INIT,
    ATTR_JOURNALD,
    ATTR_KERNEL_MODULES,
    ATTR_LABELS,
    ATTR_LEGACY,
    ATTR_LOCATON,
    ATTR_MACHINE,
@@ -60,11 +68,11 @@ from ..const import (
    ATTR_PORTS_DESCRIPTION,
    ATTR_PRIVILEGED,
    ATTR_PROTECTED,
    ATTR_REALTIME,
    ATTR_REPOSITORY,
    ATTR_SCHEMA,
    ATTR_SERVICES,
    ATTR_SLUG,
    ATTR_SNAPSHOT_EXCLUDE,
    ATTR_SQUASH,
    ATTR_STAGE,
    ATTR_STARTUP,
@@ -73,6 +81,8 @@ from ..const import (
    ATTR_SYSTEM,
    ATTR_TIMEOUT,
    ATTR_TMPFS,
    ATTR_TRANSLATIONS,
    ATTR_UART,
    ATTR_UDEV,
    ATTR_URL,
    ATTR_USB,
@@ -82,7 +92,6 @@ from ..const import (
    ATTR_VIDEO,
    ATTR_WATCHDOG,
    ATTR_WEBUI,
    PRIVILEGED_ALL,
    ROLE_ALL,
    ROLE_DEFAULT,
    AddonBoot,
@@ -90,9 +99,10 @@ from ..const import (
    AddonStartup,
    AddonState,
)
from ..coresys import CoreSys
from ..discovery.validate import valid_discovery_service
from ..docker.const import Capabilities
from ..validate import (
    docker_image,
    docker_ports,
    docker_ports_description,
    network_port,
@@ -100,51 +110,15 @@ from ..validate import (
    uuid_match,
    version_tag,
)
from .const import ATTR_BACKUP
from .options import RE_SCHEMA_ELEMENT

_LOGGER: logging.Logger = logging.getLogger(__name__)


RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share|media)(?::(rw|ro))?$")
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")

V_STR = "str"
V_INT = "int"
V_FLOAT = "float"
V_BOOL = "bool"
V_PASSWORD = "password"
V_EMAIL = "email"
V_URL = "url"
V_PORT = "port"
V_MATCH = "match"
V_LIST = "list"

RE_SCHEMA_ELEMENT = re.compile(
    r"^(?:"
    r"|bool"
    r"|email"
    r"|url"
    r"|port"
    r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
    r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
    r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
    r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
    r"|match\((?P<match>.*)\)"
    r"|list\((?P<list>.+)\)"
    r")\??$"
)
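
To see how RE_SCHEMA_ELEMENT decomposes a type string, here is a trimmed copy of the pattern exercised on invented inputs:

    import re

    RE = re.compile(
        r"^(?:|bool|email|url|port"
        r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
        r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
        r"|list\((?P<list>.+)\)"
        r")\??$"
    )
    print(RE.match("str(,120)").group("s_max"))      # -> "120"
    print(RE.match("list(low|high)").group("list"))  # -> "low|high"
    print(bool(RE.match("port?")))                   # -> True (optional port)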

_SCHEMA_LENGTH_PARTS = (
    "i_min",
    "i_max",
    "f_min",
    "f_max",
    "s_min",
    "s_max",
    "p_min",
    "p_max",
)

RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
RE_DOCKER_IMAGE_BUILD = re.compile(
    r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$"
)
@@ -154,6 +128,7 @@ SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
RE_MACHINE = re.compile(
    r"^!?(?:"
    r"|intel-nuc"
    r"|generic-x86-64"
    r"|odroid-c2"
    r"|odroid-c4"
    r"|odroid-n2"
@@ -173,34 +148,126 @@ RE_MACHINE = re.compile(
)


def _simple_startup(value) -> str:
    """Define startup schema."""
    if value == "before":
        return AddonStartup.SERVICES.value
    if value == "after":
        return AddonStartup.APPLICATION.value
    return value
def _warn_addon_config(config: dict[str, Any]):
    """Warn about add-on misconfigurations."""
    name = config.get(ATTR_NAME)
    if not name:
        raise vol.Invalid("Invalid Add-on config!")

    if config.get(ATTR_FULL_ACCESS, False) and (
        config.get(ATTR_DEVICES)
        or config.get(ATTR_UART)
        or config.get(ATTR_USB)
        or config.get(ATTR_GPIO)
    ):
        _LOGGER.warning(
            "Add-on has full device access and also requests selective device access in its configuration. Please report this to the maintainer of %s",
            name,
        )

    if config.get(ATTR_BACKUP, AddonBackupMode.HOT) == AddonBackupMode.COLD and (
        config.get(ATTR_BACKUP_POST) or config.get(ATTR_BACKUP_PRE)
    ):
        _LOGGER.warning(
            "Add-on that only supports COLD backups is trying to use pre/post backup commands. Please report this to the maintainer of %s",
            name,
        )

    return config


def _migrate_addon_config(protocol=False):
    """Migrate add-on config."""

    def _migrate(config: dict[str, Any]):
        name = config.get(ATTR_NAME)
        if not name:
            raise vol.Invalid("Invalid Add-on config!")

        # Startup 2018-03-30
        if config.get(ATTR_STARTUP) in ("before", "after"):
            value = config[ATTR_STARTUP]
            if protocol:
                _LOGGER.warning(
                    "Add-on config 'startup' with '%s' is deprecated. Please report this to the maintainer of %s",
                    value,
                    name,
                )
            if value == "before":
                config[ATTR_STARTUP] = AddonStartup.SERVICES.value
            elif value == "after":
                config[ATTR_STARTUP] = AddonStartup.APPLICATION.value

        # UART 2021-01-20
        if "auto_uart" in config:
            if protocol:
                _LOGGER.warning(
                    "Add-on config 'auto_uart' is deprecated, use 'uart'. Please report this to the maintainer of %s",
                    name,
                )
            config[ATTR_UART] = config.pop("auto_uart")

        # Device 2021-01-20
        if ATTR_DEVICES in config and any(":" in line for line in config[ATTR_DEVICES]):
            if protocol:
                _LOGGER.warning(
                    "Add-on config 'devices' uses a deprecated format, the new format is a list of paths only. Please report this to the maintainer of %s",
                    name,
                )
            config[ATTR_DEVICES] = [line.split(":")[0] for line in config[ATTR_DEVICES]]

        # TMPFS 2021-02-01
        if ATTR_TMPFS in config and not isinstance(config[ATTR_TMPFS], bool):
            if protocol:
                _LOGGER.warning(
                    "Add-on config 'tmpfs' uses a deprecated format, it is now a boolean. Please report this to the maintainer of %s",
                    name,
                )
            config[ATTR_TMPFS] = True

        # 2021-06 "snapshot" renamed to "backup"
        for entry in (
            "snapshot_exclude",
            "snapshot_post",
            "snapshot_pre",
            "snapshot",
        ):
            if entry in config:
                new_entry = entry.replace("snapshot", "backup")
                config[new_entry] = config.pop(entry)
                _LOGGER.warning(
                    "Add-on config '%s' is deprecated, '%s' should be used instead. Please report this to the maintainer of %s",
                    entry,
                    new_entry,
                    name,
                )

        return config

    return _migrate
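
A hedged before/after sketch of the migration wrapper above, applied to a made-up config (literal keys in place of the ATTR_* constants; resulting enum values assumed):

    migrate = _migrate_addon_config(protocol=True)
    config = {
        "name": "Example",
        "startup": "before",                           # legacy 2018 value
        "auto_uart": True,                             # deprecated key
        "devices": ["/dev/ttyUSB0:/dev/ttyUSB0:rwm"],  # old triple format
    }
    migrate(config)
    # config is now roughly:
    # {"name": "Example", "startup": "services",
    #  "uart": True, "devices": ["/dev/ttyUSB0"]}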


# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_CONFIG = vol.Schema(
_SCHEMA_ADDON_CONFIG = vol.Schema(
    {
        vol.Required(ATTR_NAME): vol.Coerce(str),
        vol.Required(ATTR_VERSION): vol.All(version_tag, str),
        vol.Required(ATTR_SLUG): vol.Coerce(str),
        vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
        vol.Required(ATTR_NAME): str,
        vol.Required(ATTR_VERSION): version_tag,
        vol.Required(ATTR_SLUG): str,
        vol.Required(ATTR_DESCRIPTON): str,
        vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
        vol.Optional(ATTR_MACHINE): vol.All([vol.Match(RE_MACHINE)], vol.Unique()),
        vol.Optional(ATTR_URL): vol.Url(),
        vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.Coerce(AddonStartup)),
        vol.Required(ATTR_BOOT): vol.Coerce(AddonBoot),
        vol.Optional(ATTR_STARTUP, default=AddonStartup.APPLICATION): vol.Coerce(
            AddonStartup
        ),
        vol.Optional(ATTR_BOOT, default=AddonBoot.AUTO): vol.Coerce(AddonBoot),
        vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
        vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
        vol.Optional(ATTR_STAGE, default=AddonStage.STABLE): vol.Coerce(AddonStage),
        vol.Optional(ATTR_PORTS): docker_ports,
        vol.Optional(ATTR_PORTS_DESCRIPTION): docker_ports_description,
        vol.Optional(ATTR_WATCHDOG): vol.Match(
            r"^(?:https?|\[PROTO:\w+\]|tcp):\/\/\[HOST\]:\[PORT:\d+\].*$"
            r"^(?:https?|\[PROTO:\w+\]|tcp):\/\/\[HOST\]:(\[PORT:\d+\]|\d+).*$"
        ),
        vol.Optional(ATTR_WEBUI): vol.Match(
            r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"
@@ -209,30 +276,32 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
        vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(
            network_port, vol.Equal(0)
        ),
        vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
        vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
        vol.Optional(ATTR_INGRESS_ENTRY): str,
        vol.Optional(ATTR_INGRESS_STREAM, default=False): vol.Boolean(),
        vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): str,
        vol.Optional(ATTR_PANEL_TITLE): str,
        vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
        vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_HOMEASSISTANT): version_tag,
        vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
        vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
        vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICES): [str],
        vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
        vol.Optional(ATTR_TMPFS): vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
        vol.Optional(ATTR_TMPFS, default=False): vol.Boolean(),
        vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
        vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
        vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): str},
        vol.Optional(ATTR_PRIVILEGED): [vol.Coerce(Capabilities)],
        vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
        vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
        vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
        vol.Optional(ATTR_VIDEO, default=False): vol.Boolean(),
        vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
        vol.Optional(ATTR_USB, default=False): vol.Boolean(),
        vol.Optional(ATTR_UART, default=False): vol.Boolean(),
        vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
        vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
        vol.Optional(ATTR_REALTIME, default=False): vol.Boolean(),
        vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
        vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
@@ -242,39 +311,43 @@ SCHEMA_ADDON_CONFIG = vol.Schema(
        vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
        vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
        vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
        vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [vol.Coerce(str)],
        vol.Required(ATTR_OPTIONS): dict,
        vol.Required(ATTR_SCHEMA): vol.Any(
        vol.Optional(ATTR_BACKUP_EXCLUDE): [str],
        vol.Optional(ATTR_BACKUP_PRE): str,
        vol.Optional(ATTR_BACKUP_POST): str,
        vol.Optional(ATTR_BACKUP, default=AddonBackupMode.HOT): vol.Coerce(
            AddonBackupMode
        ),
        vol.Optional(ATTR_OPTIONS, default={}): dict,
        vol.Optional(ATTR_SCHEMA, default={}): vol.Any(
            vol.Schema(
                {
                    vol.Coerce(str): vol.Any(
                    str: vol.Any(
                        SCHEMA_ELEMENT,
                        [
                            vol.Any(
                                SCHEMA_ELEMENT,
                                {
                                    vol.Coerce(str): vol.Any(
                                        SCHEMA_ELEMENT, [SCHEMA_ELEMENT]
                                    )
                                },
                                {str: vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])},
                            )
                        ],
                        vol.Schema(
                            {vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
                        ),
                        vol.Schema({str: vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}),
                    )
                }
            ),
            False,
        ),
        vol.Optional(ATTR_IMAGE): vol.Match(RE_DOCKER_IMAGE),
        vol.Optional(ATTR_IMAGE): docker_image,
        vol.Optional(ATTR_TIMEOUT, default=10): vol.All(
            vol.Coerce(int), vol.Range(min=10, max=300)
        ),
        vol.Optional(ATTR_JOURNALD, default=False): vol.Boolean(),
    },
    extra=vol.REMOVE_EXTRA,
)

SCHEMA_ADDON_CONFIG = vol.All(
    _migrate_addon_config(True), _warn_addon_config, _SCHEMA_ADDON_CONFIG
)
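
A usage sketch of the composed validator: migration runs first, then the misconfiguration warnings, then the voluptuous schema itself (minimal config invented for illustration; real configs carry more keys):

    config = SCHEMA_ADDON_CONFIG(
        {
            "name": "Example add-on",
            "version": "1.0.0",
            "slug": "example",
            "description": "Demo",
            "arch": ["amd64"],
            "auto_uart": True,  # legacy key, migrated to "uart" before validation
        }
    )
    # config["uart"] is True, and defaults such as boot/init/stage are filled in.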


# pylint: disable=no-value-for-parameter
SCHEMA_BUILD_CONFIG = vol.Schema(
@@ -283,9 +356,25 @@ SCHEMA_BUILD_CONFIG = vol.Schema(
            {vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
        ),
        vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
        vol.Optional(ATTR_ARGS, default=dict): vol.Schema(
            {vol.Coerce(str): vol.Coerce(str)}
        ),
        vol.Optional(ATTR_ARGS, default=dict): vol.Schema({str: str}),
        vol.Optional(ATTR_LABELS, default=dict): vol.Schema({str: str}),
    },
    extra=vol.REMOVE_EXTRA,
)

SCHEMA_TRANSLATION_CONFIGURATION = vol.Schema(
    {
        vol.Required(ATTR_NAME): str,
        vol.Optional(ATTR_DESCRIPTON): vol.Maybe(str),
    },
    extra=vol.REMOVE_EXTRA,
)


SCHEMA_ADDON_TRANSLATIONS = vol.Schema(
    {
        vol.Optional(ATTR_CONFIGURATION): {str: SCHEMA_TRANSLATION_CONFIGURATION},
        vol.Optional(ATTR_NETWORK): {str: str},
    },
    extra=vol.REMOVE_EXTRA,
)
@@ -294,19 +383,17 @@ SCHEMA_BUILD_CONFIG = vol.Schema(

# pylint: disable=no-value-for-parameter
SCHEMA_ADDON_USER = vol.Schema(
    {
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_IMAGE): vol.Coerce(str),
        vol.Required(ATTR_VERSION): version_tag,
        vol.Optional(ATTR_IMAGE): docker_image,
        vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): uuid_match,
        vol.Optional(ATTR_ACCESS_TOKEN): token,
        vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
            str
        ),
        vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): str,
        vol.Optional(ATTR_OPTIONS, default=dict): dict,
        vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
        vol.Optional(ATTR_BOOT): vol.Coerce(AddonBoot),
        vol.Optional(ATTR_NETWORK): docker_ports,
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
        vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
        vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
        vol.Optional(ATTR_WATCHDOG, default=False): vol.Boolean(),
@@ -314,285 +401,35 @@ SCHEMA_ADDON_USER = vol.Schema(
    extra=vol.REMOVE_EXTRA,
)


SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend(
    {
        vol.Required(ATTR_LOCATON): vol.Coerce(str),
        vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
    }
SCHEMA_ADDON_SYSTEM = vol.All(
    _migrate_addon_config(),
    _SCHEMA_ADDON_CONFIG.extend(
        {
            vol.Required(ATTR_LOCATON): str,
            vol.Required(ATTR_REPOSITORY): str,
            vol.Required(ATTR_TRANSLATIONS, default=dict): {
                str: SCHEMA_ADDON_TRANSLATIONS
            },
        }
    ),
)


SCHEMA_ADDONS_FILE = vol.Schema(
    {
        vol.Optional(ATTR_USER, default=dict): {vol.Coerce(str): SCHEMA_ADDON_USER},
        vol.Optional(ATTR_SYSTEM, default=dict): {vol.Coerce(str): SCHEMA_ADDON_SYSTEM},
    }
)


SCHEMA_ADDON_SNAPSHOT = vol.Schema(
    {
        vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
        vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
        vol.Required(ATTR_STATE): vol.Coerce(AddonState),
        vol.Required(ATTR_VERSION): vol.Coerce(str),
        vol.Optional(ATTR_USER, default=dict): {str: SCHEMA_ADDON_USER},
        vol.Optional(ATTR_SYSTEM, default=dict): {str: SCHEMA_ADDON_SYSTEM},
    },
    extra=vol.REMOVE_EXTRA,
)


def validate_options(coresys: CoreSys, raw_schema: Dict[str, Any]):
    """Validate schema."""

    def validate(struct):
        """Create schema validator for add-ons options."""
        options = {}

        # read options
        for key, value in struct.items():
            # Ignore unknown options / remove from list
            if key not in raw_schema:
                _LOGGER.warning("Unknown option %s", key)
                continue

            typ = raw_schema[key]
            try:
                if isinstance(typ, list):
                    # nested value list
                    options[key] = _nested_validate_list(coresys, typ[0], value, key)
                elif isinstance(typ, dict):
                    # nested value dict
                    options[key] = _nested_validate_dict(coresys, typ, value, key)
                else:
                    # normal value
                    options[key] = _single_validate(coresys, typ, value, key)
            except (IndexError, KeyError):
                raise vol.Invalid(f"Type error for {key}") from None

        _check_missing_options(raw_schema, options, "root")
        return options

    return validate


# pylint: disable=no-value-for-parameter
# pylint: disable=inconsistent-return-statements
def _single_validate(coresys: CoreSys, typ: str, value: Any, key: str):
    """Validate a single element."""
    # if required argument
    if value is None:
        raise vol.Invalid(f"Missing required option '{key}'") from None

    # Lookup secret
    if str(value).startswith("!secret "):
        secret: str = value.partition(" ")[2]
        value = coresys.homeassistant.secrets.get(secret)
        if value is None:
            raise vol.Invalid(f"Unknown secret {secret}") from None

    # parse extend data from type
    match = RE_SCHEMA_ELEMENT.match(typ)

    if not match:
        raise vol.Invalid(f"Unknown type {typ}") from None

    # prepare range
    range_args = {}
    for group_name in _SCHEMA_LENGTH_PARTS:
        group_value = match.group(group_name)
        if group_value:
            range_args[group_name[2:]] = float(group_value)

    if typ.startswith(V_STR) or typ.startswith(V_PASSWORD):
        return vol.All(str(value), vol.Range(**range_args))(value)
    elif typ.startswith(V_INT):
        return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
    elif typ.startswith(V_FLOAT):
        return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
    elif typ.startswith(V_BOOL):
        return vol.Boolean()(value)
    elif typ.startswith(V_EMAIL):
        return vol.Email()(value)
    elif typ.startswith(V_URL):
        return vol.Url()(value)
    elif typ.startswith(V_PORT):
        return network_port(value)
    elif typ.startswith(V_MATCH):
        return vol.Match(match.group("match"))(str(value))
    elif typ.startswith(V_LIST):
        return vol.In(match.group("list").split("|"))(str(value))

    raise vol.Invalid(f"Fatal error for {key} type {typ}") from None


def _nested_validate_list(coresys, typ, data_list, key):
    """Validate nested items."""
    options = []

    # Make sure it is a list
    if not isinstance(data_list, list):
        raise vol.Invalid(f"Invalid list for {key}") from None

    # Process list
    for element in data_list:
        # Nested?
        if isinstance(typ, dict):
            c_options = _nested_validate_dict(coresys, typ, element, key)
            options.append(c_options)
        else:
            options.append(_single_validate(coresys, typ, element, key))

    return options


def _nested_validate_dict(coresys, typ, data_dict, key):
    """Validate nested items."""
    options = {}

    # Make sure it is a dict
    if not isinstance(data_dict, dict):
        raise vol.Invalid(f"Invalid dict for {key}") from None

    # Process dict
    for c_key, c_value in data_dict.items():
        # Ignore unknown options / remove from list
        if c_key not in typ:
            _LOGGER.warning("Unknown option %s", c_key)
            continue

        # Nested?
        if isinstance(typ[c_key], list):
            options[c_key] = _nested_validate_list(
                coresys, typ[c_key][0], c_value, c_key
            )
        else:
            options[c_key] = _single_validate(coresys, typ[c_key], c_value, c_key)

    _check_missing_options(typ, options, key)
    return options


def _check_missing_options(origin, exists, root):
    """Check that all required options exist."""
    missing = set(origin) - set(exists)
    for miss_opt in missing:
        if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
            continue
        raise vol.Invalid(f"Missing option {miss_opt} in {root}") from None


def schema_ui_options(raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
    """Generate UI schema."""
    ui_schema: List[Dict[str, Any]] = []

    # read options
    for key, value in raw_schema.items():
        if isinstance(value, list):
            # nested value list
            _nested_ui_list(ui_schema, value, key)
        elif isinstance(value, dict):
            # nested value dict
            _nested_ui_dict(ui_schema, value, key)
        else:
            # normal value
            _single_ui_option(ui_schema, value, key)

    return ui_schema


def _single_ui_option(
    ui_schema: List[Dict[str, Any]], value: str, key: str, multiple: bool = False
) -> None:
    """Validate a single element."""
    ui_node: Dict[str, Union[str, bool, float, List[str]]] = {"name": key}

    # If multiple
    if multiple:
        ui_node["multiple"] = True

    # Parse extend data from type
    match = RE_SCHEMA_ELEMENT.match(value)
    if not match:
        return

    # Prepare range
    for group_name in _SCHEMA_LENGTH_PARTS:
        group_value = match.group(group_name)
        if not group_value:
            continue
        if group_name[2:] == "min":
            ui_node["lengthMin"] = float(group_value)
        elif group_name[2:] == "max":
            ui_node["lengthMax"] = float(group_value)

    # Required or optional
    if value.endswith("?"):
        ui_node["optional"] = True
    else:
        ui_node["required"] = True

    # Data types
    if value.startswith(V_STR):
        ui_node["type"] = "string"
    elif value.startswith(V_PASSWORD):
        ui_node["type"] = "string"
        ui_node["format"] = "password"
    elif value.startswith(V_INT):
        ui_node["type"] = "integer"
    elif value.startswith(V_FLOAT):
        ui_node["type"] = "float"
    elif value.startswith(V_BOOL):
        ui_node["type"] = "boolean"
    elif value.startswith(V_EMAIL):
        ui_node["type"] = "string"
        ui_node["format"] = "email"
    elif value.startswith(V_URL):
        ui_node["type"] = "string"
        ui_node["format"] = "url"
    elif value.startswith(V_PORT):
        ui_node["type"] = "integer"
    elif value.startswith(V_MATCH):
        ui_node["type"] = "string"
    elif value.startswith(V_LIST):
        ui_node["type"] = "select"
        ui_node["options"] = match.group("list").split("|")

    ui_schema.append(ui_node)


def _nested_ui_list(
    ui_schema: List[Dict[str, Any]], option_list: List[Any], key: str
) -> None:
    """UI nested list items."""
    try:
        element = option_list[0]
    except IndexError:
        _LOGGER.error("Invalid schema %s", key)
        return

    if isinstance(element, dict):
        _nested_ui_dict(ui_schema, element, key, multiple=True)
    else:
        _single_ui_option(ui_schema, element, key, multiple=True)


def _nested_ui_dict(
    ui_schema: List[Dict[str, Any]],
    option_dict: Dict[str, Any],
    key: str,
    multiple: bool = False,
) -> None:
    """UI nested dict items."""
    ui_node = {"name": key, "type": "schema", "optional": True, "multiple": multiple}

    nested_schema = []
    for c_key, c_value in option_dict.items():
        # Nested?
        if isinstance(c_value, list):
            _nested_ui_list(nested_schema, c_value, c_key)
        else:
            _single_ui_option(nested_schema, c_value, c_key)

    ui_node["schema"] = nested_schema
    ui_schema.append(ui_node)

SCHEMA_ADDON_BACKUP = vol.Schema(
    {
        vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
        vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
        vol.Required(ATTR_STATE): vol.Coerce(AddonState),
        vol.Required(ATTR_VERSION): version_tag,
    },
    extra=vol.REMOVE_EXTRA,
)

@@ -9,6 +9,7 @@ from ..coresys import CoreSys, CoreSysAttributes
from .addons import APIAddons
from .audio import APIAudio
from .auth import APIAuth
from .backups import APIBackups
from .cli import APICli
from .discovery import APIDiscovery
from .dns import APICoreDNS
@@ -19,15 +20,16 @@ from .host import APIHost
from .info import APIInfo
from .ingress import APIIngress
from .jobs import APIJobs
from .middleware.security import SecurityMiddleware
from .multicast import APIMulticast
from .network import APINetwork
from .observer import APIObserver
from .os import APIOS
from .proxy import APIProxy
from .resolution import APIResoulution
from .security import SecurityMiddleware
from .security import APISecurity
from .services import APIServices
from .snapshots import APISnapshots
from .store import APIStore
from .supervisor import APISupervisor

_LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -60,6 +62,7 @@ class RestAPI(CoreSysAttributes):
        self._register_addons()
        self._register_audio()
        self._register_auth()
        self._register_backups()
        self._register_cli()
        self._register_discovery()
        self._register_dns()
@@ -78,8 +81,9 @@ class RestAPI(CoreSysAttributes):
        self._register_proxy()
        self._register_resolution()
        self._register_services()
        self._register_snapshots()
        self._register_supervisor()
        self._register_store()
        self._register_security()

        await self.start()

@@ -141,6 +145,20 @@ class RestAPI(CoreSysAttributes):
                web.get("/os/info", api_os.info),
                web.post("/os/update", api_os.update),
                web.post("/os/config/sync", api_os.config_sync),
                web.post("/os/datadisk/move", api_os.migrate_data),
                web.get("/os/datadisk/list", api_os.list_data),
            ]
        )

    def _register_security(self) -> None:
        """Register Security functions."""
        api_security = APISecurity()
        api_security.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/security/info", api_security.info),
                web.post("/security/options", api_security.options),
            ]
        )

@@ -207,7 +225,6 @@ class RestAPI(CoreSysAttributes):
            [
                web.get("/hardware/info", api_hardware.info),
                web.get("/hardware/audio", api_hardware.audio),
                web.post("/hardware/trigger", api_hardware.trigger),
            ]
        )

@@ -226,6 +243,10 @@ class RestAPI(CoreSysAttributes):
        self.webapp.add_routes(
            [
                web.get("/resolution/info", api_resolution.info),
                web.post(
                    "/resolution/check/{check}/options", api_resolution.options_check
                ),
                web.post("/resolution/check/{check}/run", api_resolution.run_check),
                web.post(
                    "/resolution/suggestion/{suggestion}",
                    api_resolution.apply_suggestion,
@@ -238,6 +259,7 @@ class RestAPI(CoreSysAttributes):
                    "/resolution/issue/{issue}",
                    api_resolution.dismiss_issue,
                ),
                web.post("/resolution/healthcheck", api_resolution.healthcheck),
            ]
        )

@@ -248,6 +270,7 @@ class RestAPI(CoreSysAttributes):

        self.webapp.add_routes(
            [
                web.get("/auth", api_auth.auth),
                web.post("/auth", api_auth.auth),
                web.post("/auth/reset", api_auth.reset),
                web.delete("/auth/cache", api_auth.cache),
@@ -261,6 +284,10 @@ class RestAPI(CoreSysAttributes):

        self.webapp.add_routes(
            [
                web.get(
                    "/supervisor/available_updates", api_supervisor.available_updates
                ),
                web.post("/refresh_updates", api_supervisor.reload),
                web.get("/supervisor/ping", api_supervisor.ping),
                web.get("/supervisor/info", api_supervisor.info),
                web.get("/supervisor/stats", api_supervisor.stats),
@@ -337,16 +364,15 @@ class RestAPI(CoreSysAttributes):
                web.get("/addons", api_addons.list),
                web.post("/addons/reload", api_addons.reload),
                web.get("/addons/{addon}/info", api_addons.info),
                web.post("/addons/{addon}/install", api_addons.install),
                web.post("/addons/{addon}/uninstall", api_addons.uninstall),
                web.post("/addons/{addon}/start", api_addons.start),
                web.post("/addons/{addon}/stop", api_addons.stop),
                web.post("/addons/{addon}/restart", api_addons.restart),
                web.post("/addons/{addon}/update", api_addons.update),
                web.post("/addons/{addon}/options", api_addons.options),
                web.post(
                    "/addons/{addon}/options/validate", api_addons.options_validate
                ),
                web.get("/addons/{addon}/options/config", api_addons.options_config),
                web.post("/addons/{addon}/rebuild", api_addons.rebuild),
                web.get("/addons/{addon}/logs", api_addons.logs),
                web.get("/addons/{addon}/icon", api_addons.icon),
@@ -373,30 +399,41 @@ class RestAPI(CoreSysAttributes):
            ]
        )

    def _register_snapshots(self) -> None:
        """Register snapshots functions."""
        api_snapshots = APISnapshots()
        api_snapshots.coresys = self.coresys
    def _register_backups(self) -> None:
        """Register backups functions."""
        api_backups = APIBackups()
        api_backups.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/snapshots", api_snapshots.list),
                web.post("/snapshots/reload", api_snapshots.reload),
                web.post("/snapshots/new/full", api_snapshots.snapshot_full),
                web.post("/snapshots/new/partial", api_snapshots.snapshot_partial),
                web.post("/snapshots/new/upload", api_snapshots.upload),
                web.get("/snapshots/{snapshot}/info", api_snapshots.info),
                web.delete("/snapshots/{snapshot}", api_snapshots.remove),
                web.get("/snapshots", api_backups.list),
                web.post("/snapshots/reload", api_backups.reload),
                web.post("/snapshots/new/full", api_backups.backup_full),
                web.post("/snapshots/new/partial", api_backups.backup_partial),
                web.post("/snapshots/new/upload", api_backups.upload),
                web.get("/snapshots/{slug}/info", api_backups.info),
                web.delete("/snapshots/{slug}", api_backups.remove),
                web.post("/snapshots/{slug}/restore/full", api_backups.restore_full),
                web.post(
                    "/snapshots/{snapshot}/restore/full", api_snapshots.restore_full
                    "/snapshots/{slug}/restore/partial",
                    api_backups.restore_partial,
                ),
                web.get("/snapshots/{slug}/download", api_backups.download),
                web.post("/snapshots/{slug}/remove", api_backups.remove),
                # June 2021: /snapshots was renamed to /backups
                web.get("/backups", api_backups.list),
                web.post("/backups/reload", api_backups.reload),
                web.post("/backups/new/full", api_backups.backup_full),
                web.post("/backups/new/partial", api_backups.backup_partial),
                web.post("/backups/new/upload", api_backups.upload),
                web.get("/backups/{slug}/info", api_backups.info),
                web.delete("/backups/{slug}", api_backups.remove),
                web.post("/backups/{slug}/restore/full", api_backups.restore_full),
                web.post(
                    "/snapshots/{snapshot}/restore/partial",
                    api_snapshots.restore_partial,
                    "/backups/{slug}/restore/partial",
                    api_backups.restore_partial,
                ),
                web.get("/snapshots/{snapshot}/download", api_snapshots.download),
                # Old, remove at end of 2020
                web.post("/snapshots/{snapshot}/remove", api_snapshots.remove),
                web.get("/backups/{slug}/download", api_backups.download),
            ]
        )

@@ -467,6 +504,46 @@ class RestAPI(CoreSysAttributes):
            ]
        )

    def _register_store(self) -> None:
        """Register store endpoints."""
        api_store = APIStore()
        api_store.coresys = self.coresys

        self.webapp.add_routes(
            [
                web.get("/store", api_store.store_info),
                web.get("/store/addons", api_store.addons_list),
                web.get("/store/addons/{addon}", api_store.addons_addon_info),
                web.get("/store/addons/{addon}/{version}", api_store.addons_addon_info),
                web.post(
                    "/store/addons/{addon}/install", api_store.addons_addon_install
                ),
                web.post(
                    "/store/addons/{addon}/install/{version}",
                    api_store.addons_addon_install,
                ),
                web.post("/store/addons/{addon}/update", api_store.addons_addon_update),
                web.post(
                    "/store/addons/{addon}/update/{version}",
                    api_store.addons_addon_update,
                ),
                web.post("/store/reload", api_store.reload),
                web.get("/store/repositories", api_store.repositories_list),
                web.get(
                    "/store/repositories/{repository}",
                    api_store.repositories_repository_info,
                ),
            ]
        )

        # Reroute from legacy
        self.webapp.add_routes(
            [
                web.post("/addons/{addon}/install", api_store.addons_addon_install),
                web.post("/addons/{addon}/update", api_store.addons_addon_update),
            ]
        )

    def _register_panel(self) -> None:
        """Register panel for Home Assistant."""
        panel_dir = Path(__file__).parent.joinpath("panel")
@@ -1,7 +1,7 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict, List
from typing import Any, Awaitable

from aiohttp import web
import voluptuous as vol
@@ -71,6 +71,7 @@ from ..const import (
    ATTR_OPTIONS,
    ATTR_PRIVILEGED,
    ATTR_PROTECTED,
    ATTR_PWNED,
    ATTR_RATING,
    ATTR_REPOSITORIES,
    ATTR_REPOSITORY,
@@ -82,6 +83,8 @@ from ..const import (
    ATTR_STARTUP,
    ATTR_STATE,
    ATTR_STDIN,
    ATTR_TRANSLATIONS,
    ATTR_UART,
    ATTR_UDEV,
    ATTR_UPDATE_AVAILABLE,
    ATTR_URL,
@@ -101,13 +104,13 @@ from ..const import (
)
from ..coresys import CoreSysAttributes
from ..docker.stats import DockerStats
from ..exceptions import APIError
from ..exceptions import APIError, APIForbidden, PwnedError, PwnedSecret
from ..validate import docker_ports
from .utils import api_process, api_process_raw, api_validate
from .utils import api_process, api_process_raw, api_validate, json_loads

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): str})

# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
@@ -115,8 +118,8 @@ SCHEMA_OPTIONS = vol.Schema(
        vol.Optional(ATTR_BOOT): vol.Coerce(AddonBoot),
        vol.Optional(ATTR_NETWORK): vol.Maybe(docker_ports),
        vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
        vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
        vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
    }
@@ -153,7 +156,7 @@ class APIAddons(CoreSysAttributes):
        return addon

    @api_process
    async def list(self, request: web.Request) -> Dict[str, Any]:
    async def list(self, request: web.Request) -> dict[str, Any]:
        """Return all add-ons or repositories."""
        data_addons = [
            {
@@ -170,6 +173,7 @@ class APIAddons(CoreSysAttributes):
                ATTR_INSTALLED: addon.is_installed,
                ATTR_AVAILABLE: addon.available,
                ATTR_DETACHED: addon.is_detached,
                ATTR_HOMEASSISTANT: addon.homeassistant_version,
                ATTR_REPOSITORY: addon.repository,
                ATTR_BUILD: addon.need_build,
                ATTR_URL: addon.url,
@@ -197,7 +201,7 @@ class APIAddons(CoreSysAttributes):
        await asyncio.shield(self.sys_store.reload())

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
    async def info(self, request: web.Request) -> dict[str, Any]:
        """Return add-on information."""
        addon: AnyAddon = self._extract_addon(request)

@@ -237,7 +241,7 @@ class APIAddons(CoreSysAttributes):
            ATTR_PRIVILEGED: addon.privileged,
            ATTR_FULL_ACCESS: addon.with_full_access,
            ATTR_APPARMOR: addon.apparmor,
            ATTR_DEVICES: _pretty_devices(addon),
            ATTR_DEVICES: addon.static_devices,
            ATTR_ICON: addon.with_icon,
            ATTR_LOGO: addon.with_logo,
            ATTR_CHANGELOG: addon.with_changelog,
@@ -250,6 +254,7 @@ class APIAddons(CoreSysAttributes):
            ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
            ATTR_GPIO: addon.with_gpio,
            ATTR_USB: addon.with_usb,
            ATTR_UART: addon.with_uart,
            ATTR_KERNEL_MODULES: addon.with_kernel_modules,
            ATTR_DEVICETREE: addon.with_devicetree,
            ATTR_UDEV: addon.with_udev,
@@ -262,6 +267,7 @@ class APIAddons(CoreSysAttributes):
            ATTR_SERVICES: _pretty_services(addon),
            ATTR_DISCOVERY: addon.discovery,
            ATTR_IP_ADDRESS: None,
            ATTR_TRANSLATIONS: addon.translations,
            ATTR_INGRESS: addon.with_ingress,
            ATTR_INGRESS_ENTRY: None,
            ATTR_INGRESS_URL: None,
@@ -286,6 +292,8 @@ class APIAddons(CoreSysAttributes):
                ATTR_VERSION: addon.version,
                ATTR_UPDATE_AVAILABLE: addon.need_update,
                ATTR_WATCHDOG: addon.watchdog,
                ATTR_DEVICES: addon.static_devices
                + [device.path for device in addon.devices],
            }
        )

@@ -301,7 +309,7 @@ class APIAddons(CoreSysAttributes):

        # Extend schema with add-on specific validation
        addon_schema = SCHEMA_OPTIONS.extend(
            {vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema)}
            {vol.Optional(ATTR_OPTIONS): vol.Maybe(addon.schema)}
        )

        # Validate/Process Body
@@ -330,20 +338,61 @@ class APIAddons(CoreSysAttributes):
    async def options_validate(self, request: web.Request) -> None:
        """Validate user options for add-on."""
        addon = self._extract_addon_installed(request)
        data = {ATTR_MESSAGE: "", ATTR_VALID: True}
        data = {ATTR_MESSAGE: "", ATTR_VALID: True, ATTR_PWNED: False}

        options = await request.json(loads=json_loads) or addon.options

        # Validate config
        options_schema = addon.schema
        try:
            addon.schema(addon.options)
            options_schema.validate(options)
        except vol.Invalid as ex:
            data[ATTR_MESSAGE] = humanize_error(addon.options, ex)
            data[ATTR_MESSAGE] = humanize_error(options, ex)
            data[ATTR_VALID] = False

        if not self.sys_security.pwned:
            return data

        # Pwned check
        for secret in options_schema.pwned:
            try:
                await self.sys_security.verify_secret(secret)
                continue
            except PwnedSecret:
                data[ATTR_PWNED] = True
            except PwnedError:
                data[ATTR_PWNED] = None
            break

        if self.sys_security.force and data[ATTR_PWNED] in (None, True):
            data[ATTR_VALID] = False
            if data[ATTR_PWNED] is None:
                data[ATTR_MESSAGE] = "Error happened during the pwned secrets check!"
            else:
                data[ATTR_MESSAGE] = "Add-on uses pwned secrets!"

        return data
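
A stand-in sketch of the per-secret decision table above (stub exceptions in place of the supervisor's PwnedSecret/PwnedError):

    class PwnedSecret(Exception): ...
    class PwnedError(Exception): ...

    def classify(secrets, verify):
        pwned = False                # no leaked secret found (yet)
        for secret in secrets:
            try:
                verify(secret)       # passes: keep checking the rest
                continue
            except PwnedSecret:
                pwned = True         # confirmed leaked secret
            except PwnedError:
                pwned = None         # lookup failed, result unknown
            break                    # first hit or failure is decisive
        return pwned

With sys_security.force enabled, a True or None result additionally flips "valid" to False, so the options are rejected rather than merely flagged.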

    @api_process
    async def options_config(self, request: web.Request) -> None:
        """Validate user options for add-on."""
        slug: str = request.match_info.get("addon")
        if slug != "self":
            raise APIForbidden("This can be only read by the Add-on itself!")
        addon = self._extract_addon_installed(request)

        # Lookup/reload secrets
        await self.sys_homeassistant.secrets.reload()
        try:
            return addon.schema.validate(addon.options)
        except vol.Invalid:
            raise APIError("Invalid configuration data for the add-on") from None

    @api_process
    async def security(self, request: web.Request) -> None:
        """Store security options for add-on."""
        addon = self._extract_addon_installed(request)
        body: Dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)
        body: dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

        if ATTR_PROTECTED in body:
            _LOGGER.warning("Changing protected flag for %s!", addon.slug)
@@ -352,7 +401,7 @@ class APIAddons(CoreSysAttributes):
        addon.save_persist()

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
    async def stats(self, request: web.Request) -> dict[str, Any]:
        """Return resource information."""
        addon = self._extract_addon_installed(request)

@@ -369,12 +418,6 @@ class APIAddons(CoreSysAttributes):
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    def install(self, request: web.Request) -> Awaitable[None]:
        """Install add-on."""
        addon = self._extract_addon(request)
        return asyncio.shield(addon.install())

    @api_process
    def uninstall(self, request: web.Request) -> Awaitable[None]:
        """Uninstall add-on."""
@@ -393,12 +436,6 @@ class APIAddons(CoreSysAttributes):
        addon = self._extract_addon_installed(request)
        return asyncio.shield(addon.stop())

    @api_process
    def update(self, request: web.Request) -> Awaitable[None]:
        """Update add-on."""
        addon: Addon = self._extract_addon_installed(request)
        return asyncio.shield(addon.update())

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart add-on."""
@@ -468,14 +505,6 @@ class APIAddons(CoreSysAttributes):
        await asyncio.shield(addon.write_stdin(data))


def _pretty_devices(addon: AnyAddon) -> List[str]:
    """Return a simplified device list."""
    dev_list = addon.devices
    if not dev_list:
        return []
    return [row.split(":")[0] for row in dev_list]


def _pretty_services(addon: AnyAddon) -> List[str]:
def _pretty_services(addon: AnyAddon) -> list[str]:
    """Return a simplified services role list."""
    return [f"{name}:{access}" for name, access in addon.services_role.items()]
@@ -1,7 +1,7 @@
"""Init file for Supervisor Audio RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict
from typing import Any, Awaitable

from aiohttp import web
import attr
@@ -56,10 +56,10 @@ SCHEMA_MUTE = vol.Schema(
    }
)

SCHEMA_DEFAULT = vol.Schema({vol.Required(ATTR_NAME): vol.Coerce(str)})
SCHEMA_DEFAULT = vol.Schema({vol.Required(ATTR_NAME): str})

SCHEMA_PROFILE = vol.Schema(
    {vol.Required(ATTR_CARD): vol.Coerce(str), vol.Required(ATTR_NAME): vol.Coerce(str)}
    {vol.Required(ATTR_CARD): str, vol.Required(ATTR_NAME): str}
)


@@ -67,7 +67,7 @@ class APIAudio(CoreSysAttributes):
    """Handle RESTful API for Audio functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
    async def info(self, request: web.Request) -> dict[str, Any]:
        """Return Audio information."""
        return {
            ATTR_VERSION: self.sys_plugins.audio.version,
@@ -89,7 +89,7 @@ class APIAudio(CoreSysAttributes):
        }

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
    async def stats(self, request: web.Request) -> dict[str, Any]:
        """Return resource information."""
        stats = await self.sys_plugins.audio.stats()
@@ -1,7 +1,6 @@
"""Init file for Supervisor auth/SSO RESTful API."""
import asyncio
import logging
from typing import Dict

from aiohttp import BasicAuth, web
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
@@ -24,11 +23,15 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_PASSWORD_RESET = vol.Schema(
    {
        vol.Required(ATTR_USERNAME): vol.Coerce(str),
        vol.Required(ATTR_PASSWORD): vol.Coerce(str),
        vol.Required(ATTR_USERNAME): str,
        vol.Required(ATTR_PASSWORD): str,
    }
)

REALM_HEADER: dict[str, str] = {
    WWW_AUTHENTICATE: 'Basic realm="Home Assistant Authentication"'
}


class APIAuth(CoreSysAttributes):
    """Handle RESTful API for auth functions."""
@@ -42,7 +45,7 @@ class APIAuth(CoreSysAttributes):
        return self.sys_auth.check_login(addon, auth.login, auth.password)

    def _process_dict(
        self, request: web.Request, addon: Addon, data: Dict[str, str]
        self, request: web.Request, addon: Addon, data: dict[str, str]
    ) -> bool:
        """Process login with dict data.

@@ -63,7 +66,9 @@ class APIAuth(CoreSysAttributes):

        # BasicAuth
        if AUTHORIZATION in request.headers:
            return await self._process_basic(request, addon)
            if not await self._process_basic(request, addon):
                raise HTTPUnauthorized(headers=REALM_HEADER)
            return True

        # Json
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
@@ -75,14 +80,12 @@ class APIAuth(CoreSysAttributes):
            data = await request.post()
            return await self._process_dict(request, addon, data)

        raise HTTPUnauthorized(
            headers={WWW_AUTHENTICATE: 'Basic realm="Home Assistant Authentication"'}
        )
        raise HTTPUnauthorized(headers=REALM_HEADER)

    @api_process
    async def reset(self, request: web.Request) -> None:
        """Process reset password request."""
        body: Dict[str, str] = await api_validate(SCHEMA_PASSWORD_RESET, request)
        body: dict[str, str] = await api_validate(SCHEMA_PASSWORD_RESET, request)
        await asyncio.shield(
            self.sys_auth.change_password(body[ATTR_USERNAME], body[ATTR_PASSWORD])
        )
supervisor/api/backups.py (new file, 218 lines)
@@ -0,0 +1,218 @@
"""Backups RESTful API."""
import asyncio
import logging
from pathlib import Path
import re
from tempfile import TemporaryDirectory

from aiohttp import web
from aiohttp.hdrs import CONTENT_DISPOSITION
import voluptuous as vol

from ..backups.validate import ALL_FOLDERS
from ..const import (
    ATTR_ADDONS,
    ATTR_BACKUPS,
    ATTR_CONTENT,
    ATTR_DATE,
    ATTR_FOLDERS,
    ATTR_HOMEASSISTANT,
    ATTR_NAME,
    ATTR_PASSWORD,
    ATTR_PROTECTED,
    ATTR_REPOSITORIES,
    ATTR_SIZE,
    ATTR_SLUG,
    ATTR_TYPE,
    ATTR_VERSION,
    CONTENT_TYPE_TAR,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

RE_SLUGIFY_NAME = re.compile(r"[^A-Za-z0-9]+")

# pylint: disable=no-value-for-parameter
SCHEMA_RESTORE_PARTIAL = vol.Schema(
    {
        vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
    }
)

SCHEMA_RESTORE_FULL = vol.Schema({vol.Optional(ATTR_PASSWORD): vol.Maybe(str)})

SCHEMA_BACKUP_FULL = vol.Schema(
    {
        vol.Optional(ATTR_NAME): str,
        vol.Optional(ATTR_PASSWORD): vol.Maybe(str),
    }
)

SCHEMA_BACKUP_PARTIAL = SCHEMA_BACKUP_FULL.extend(
    {
        vol.Optional(ATTR_ADDONS): vol.All([str], vol.Unique()),
        vol.Optional(ATTR_FOLDERS): vol.All([vol.In(ALL_FOLDERS)], vol.Unique()),
        vol.Optional(ATTR_HOMEASSISTANT): vol.Boolean(),
    }
)
|
||||
|
||||
|
||||
class APIBackups(CoreSysAttributes):
|
||||
"""Handle RESTful API for backups functions."""
|
||||
|
||||
def _extract_slug(self, request):
|
||||
"""Return backup, throw an exception if it doesn't exist."""
|
||||
backup = self.sys_backups.get(request.match_info.get("slug"))
|
||||
if not backup:
|
||||
raise APIError("Backup does not exist")
|
||||
return backup
|
||||
|
||||
@api_process
|
||||
async def list(self, request):
|
||||
"""Return backup list."""
|
||||
data_backups = []
|
||||
for backup in self.sys_backups.list_backups:
|
||||
data_backups.append(
|
||||
{
|
||||
ATTR_SLUG: backup.slug,
|
||||
ATTR_NAME: backup.name,
|
||||
ATTR_DATE: backup.date,
|
||||
ATTR_TYPE: backup.sys_type,
|
||||
ATTR_SIZE: backup.size,
|
||||
ATTR_PROTECTED: backup.protected,
|
||||
ATTR_CONTENT: {
|
||||
ATTR_HOMEASSISTANT: backup.homeassistant_version is not None,
|
||||
ATTR_ADDONS: backup.addon_list,
|
||||
ATTR_FOLDERS: backup.folders,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
if request.path == "/snapshots":
|
||||
# Kept for backwards compability
|
||||
return {"snapshots": data_backups}
|
||||
|
||||
return {ATTR_BACKUPS: data_backups}
|
||||
|
||||
@api_process
|
||||
async def reload(self, request):
|
||||
"""Reload backup list."""
|
||||
await asyncio.shield(self.sys_backups.reload())
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return backup info."""
|
||||
backup = self._extract_slug(request)
|
||||
|
||||
data_addons = []
|
||||
for addon_data in backup.addons:
|
||||
data_addons.append(
|
||||
{
|
||||
ATTR_SLUG: addon_data[ATTR_SLUG],
|
||||
ATTR_NAME: addon_data[ATTR_NAME],
|
||||
ATTR_VERSION: addon_data[ATTR_VERSION],
|
||||
ATTR_SIZE: addon_data[ATTR_SIZE],
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
ATTR_SLUG: backup.slug,
|
||||
ATTR_TYPE: backup.sys_type,
|
||||
ATTR_NAME: backup.name,
|
||||
ATTR_DATE: backup.date,
|
||||
ATTR_SIZE: backup.size,
|
||||
ATTR_PROTECTED: backup.protected,
|
||||
ATTR_HOMEASSISTANT: backup.homeassistant_version,
|
||||
ATTR_ADDONS: data_addons,
|
||||
ATTR_REPOSITORIES: backup.repositories,
|
||||
ATTR_FOLDERS: backup.folders,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def backup_full(self, request):
|
||||
"""Create full backup."""
|
||||
body = await api_validate(SCHEMA_BACKUP_FULL, request)
|
||||
backup = await asyncio.shield(self.sys_backups.do_backup_full(**body))
|
||||
|
||||
if backup:
|
||||
return {ATTR_SLUG: backup.slug}
|
||||
return False
|
||||
|
||||
@api_process
|
||||
async def backup_partial(self, request):
|
||||
"""Create a partial backup."""
|
||||
body = await api_validate(SCHEMA_BACKUP_PARTIAL, request)
|
||||
backup = await asyncio.shield(self.sys_backups.do_backup_partial(**body))
|
||||
|
||||
if backup:
|
||||
return {ATTR_SLUG: backup.slug}
|
||||
return False
|
||||
|
||||
@api_process
|
||||
async def restore_full(self, request):
|
||||
"""Full restore of a backup."""
|
||||
backup = self._extract_slug(request)
|
||||
body = await api_validate(SCHEMA_RESTORE_FULL, request)
|
||||
|
||||
return await asyncio.shield(self.sys_backups.do_restore_full(backup, **body))
|
||||
|
||||
@api_process
|
||||
async def restore_partial(self, request):
|
||||
"""Partial restore a backup."""
|
||||
backup = self._extract_slug(request)
|
||||
body = await api_validate(SCHEMA_RESTORE_PARTIAL, request)
|
||||
|
||||
return await asyncio.shield(self.sys_backups.do_restore_partial(backup, **body))
|
||||
|
||||
@api_process
|
||||
async def remove(self, request):
|
||||
"""Remove a backup."""
|
||||
backup = self._extract_slug(request)
|
||||
return self.sys_backups.remove(backup)
|
||||
|
||||
async def download(self, request):
|
||||
"""Download a backup file."""
|
||||
backup = self._extract_slug(request)
|
||||
|
||||
_LOGGER.info("Downloading backup %s", backup.slug)
|
||||
response = web.FileResponse(backup.tarfile)
|
||||
response.content_type = CONTENT_TYPE_TAR
|
||||
response.headers[
|
||||
CONTENT_DISPOSITION
|
||||
] = f"attachment; filename={RE_SLUGIFY_NAME.sub('_', backup.name)}.tar"
|
||||
return response
|
||||
|
||||
@api_process
|
||||
async def upload(self, request):
|
||||
"""Upload a backup file."""
|
||||
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp_dir:
|
||||
tar_file = Path(temp_dir, "backup.tar")
|
||||
reader = await request.multipart()
|
||||
contents = await reader.next()
|
||||
try:
|
||||
with tar_file.open("wb") as backup:
|
||||
while True:
|
||||
chunk = await contents.read_chunk()
|
||||
if not chunk:
|
||||
break
|
||||
backup.write(chunk)
|
||||
|
||||
except OSError as err:
|
||||
_LOGGER.error("Can't write new backup file: %s", err)
|
||||
return False
|
||||
|
||||
except asyncio.CancelledError:
|
||||
return False
|
||||
|
||||
backup = await asyncio.shield(self.sys_backups.import_backup(tar_file))
|
||||
|
||||
if backup:
|
||||
return {ATTR_SLUG: backup.slug}
|
||||
return False
|
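Since backups.py is a new file, the endpoints it backs are easiest to see from the consumer side. A rough sketch of creating and then downloading a full backup over the REST API; the paths follow the handlers above, while the base URL and token handling are assumptions:

import os
import shutil

import requests

SUPERVISOR = "http://supervisor"  # assumed in-container base URL
HEADERS = {"Authorization": f"Bearer {os.environ.get('SUPERVISOR_TOKEN', '')}"}

# Create a full backup; backup_full() returns {"slug": ...} on success,
# wrapped by api_process in {"result": "ok", "data": {...}}.
resp = requests.post(
    f"{SUPERVISOR}/backups/new/full",
    json={"name": "nightly"},
    headers=HEADERS,
)
slug = resp.json()["data"]["slug"]

# Stream the tarfile; download() sets Content-Disposition from the backup name.
with requests.get(
    f"{SUPERVISOR}/backups/{slug}/download", headers=HEADERS, stream=True
) as dl, open(f"{slug}.tar", "wb") as fh:
    shutil.copyfileobj(dl.raw, fh)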
@@ -1,7 +1,7 @@
 """Init file for Supervisor HA cli RESTful API."""
 import asyncio
 import logging
-from typing import Any, Dict
+from typing import Any

 from aiohttp import web
 import voluptuous as vol
@@ -32,7 +32,7 @@ class APICli(CoreSysAttributes):
     """Handle RESTful API for HA Cli functions."""

     @api_process
-    async def info(self, request: web.Request) -> Dict[str, Any]:
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Return HA cli information."""
         return {
             ATTR_VERSION: self.sys_plugins.cli.version,
@@ -41,7 +41,7 @@ class APICli(CoreSysAttributes):
         }

     @api_process
-    async def stats(self, request: web.Request) -> Dict[str, Any]:
+    async def stats(self, request: web.Request) -> dict[str, Any]:
         """Return resource information."""
         stats = await self.sys_plugins.cli.stats()
15  supervisor/api/const.py  Normal file
@@ -0,0 +1,15 @@
"""Const for API."""

ATTR_AGENT_VERSION = "agent_version"
ATTR_BOOT_TIMESTAMP = "boot_timestamp"
ATTR_DATA_DISK = "data_disk"
ATTR_DEVICE = "device"
ATTR_DT_SYNCHRONIZED = "dt_synchronized"
ATTR_DT_UTC = "dt_utc"
ATTR_STARTUP_TIME = "startup_time"
ATTR_USE_NTP = "use_ntp"
ATTR_USE_RTC = "use_rtc"
ATTR_APPARMOR_VERSION = "apparmor_version"
ATTR_PANEL_PATH = "panel_path"
ATTR_UPDATE_TYPE = "update_type"
ATTR_AVAILABLE_UPDATES = "available_updates"
@@ -13,7 +13,7 @@ from ..const import (
 from ..coresys import CoreSysAttributes
 from ..discovery.validate import valid_discovery_service
 from ..exceptions import APIError, APIForbidden
-from .utils import api_process, api_validate
+from .utils import api_process, api_validate, require_home_assistant

 SCHEMA_DISCOVERY = vol.Schema(
     {
@@ -33,15 +33,10 @@ class APIDiscovery(CoreSysAttributes):
             raise APIError("Discovery message not found")
         return message

-    def _check_permission_ha(self, request):
-        """Check permission for API call / Home Assistant."""
-        if request[REQUEST_FROM] != self.sys_homeassistant:
-            raise APIForbidden("Only HomeAssistant can use this API!")
-
     @api_process
+    @require_home_assistant
     async def list(self, request):
         """Show registered services."""
-        self._check_permission_ha(request)

         # Get available discovery
         discovery = []
@@ -79,13 +74,11 @@ class APIDiscovery(CoreSysAttributes):
         return {ATTR_UUID: message.uuid}

     @api_process
+    @require_home_assistant
     async def get_discovery(self, request):
         """Read data into a discovery message."""
         message = self._extract_message(request)

-        # HomeAssistant?
-        self._check_permission_ha(request)
-
         return {
             ATTR_ADDON: message.addon,
             ATTR_SERVICE: message.service,
@@ -1,7 +1,7 @@
 """Init file for Supervisor DNS RESTful API."""
 import asyncio
 import logging
-from typing import Any, Awaitable, Dict
+from typing import Any, Awaitable

 from aiohttp import web
 import voluptuous as vol
@@ -40,7 +40,7 @@ class APICoreDNS(CoreSysAttributes):
     """Handle RESTful API for DNS functions."""

     @api_process
-    async def info(self, request: web.Request) -> Dict[str, Any]:
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Return DNS information."""
         return {
             ATTR_VERSION: self.sys_plugins.dns.version,
@@ -63,7 +63,7 @@ class APICoreDNS(CoreSysAttributes):
         self.sys_plugins.dns.save_data()

     @api_process
-    async def stats(self, request: web.Request) -> Dict[str, Any]:
+    async def stats(self, request: web.Request) -> dict[str, Any]:
         """Return resource information."""
         stats = await self.sys_plugins.dns.stats()
@@ -1,6 +1,6 @@
 """Init file for Supervisor Home Assistant RESTful API."""
 import logging
-from typing import Any, Dict
+from typing import Any

 from aiohttp import web
 import voluptuous as vol
@@ -21,7 +21,7 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)

 SCHEMA_DOCKER_REGISTRY = vol.Schema(
     {
-        vol.Coerce(str): {
+        str: {
             vol.Required(ATTR_USERNAME): str,
             vol.Required(ATTR_PASSWORD): str,
         }
@@ -33,7 +33,7 @@ class APIDocker(CoreSysAttributes):
     """Handle RESTful API for Docker configuration."""

     @api_process
-    async def registries(self, request) -> Dict[str, Any]:
+    async def registries(self, request) -> dict[str, Any]:
         """Return the list of registries."""
         data_registries = {}
         for hostname, registry in self.sys_docker.config.registries.items():
@@ -1,54 +1,50 @@
 """Init file for Supervisor hardware RESTful API."""
 import asyncio
 import logging
-from typing import Any, Awaitable, Dict, List
+from typing import Any

 from aiohttp import web

-from ..const import (
-    ATTR_AUDIO,
-    ATTR_DISK,
-    ATTR_GPIO,
-    ATTR_INPUT,
-    ATTR_OUTPUT,
-    ATTR_SERIAL,
-    ATTR_USB,
-)
+from ..const import ATTR_AUDIO, ATTR_DEVICES, ATTR_INPUT, ATTR_NAME, ATTR_OUTPUT
 from ..coresys import CoreSysAttributes
+from ..hardware.const import (
+    ATTR_ATTRIBUTES,
+    ATTR_BY_ID,
+    ATTR_DEV_PATH,
+    ATTR_SUBSYSTEM,
+    ATTR_SYSFS,
+)
+from ..hardware.data import Device
 from .utils import api_process

 _LOGGER: logging.Logger = logging.getLogger(__name__)


+def device_struct(device: Device) -> dict[str, Any]:
+    """Return a dict with information of a device to be used in the API."""
+    return {
+        ATTR_NAME: device.name,
+        ATTR_SYSFS: device.sysfs,
+        ATTR_DEV_PATH: device.path,
+        ATTR_SUBSYSTEM: device.subsystem,
+        ATTR_BY_ID: device.by_id,
+        ATTR_ATTRIBUTES: device.attributes,
+    }
+
+
 class APIHardware(CoreSysAttributes):
     """Handle RESTful API for hardware functions."""

     @api_process
-    async def info(self, request: web.Request) -> Dict[str, Any]:
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Show hardware info."""
-        serial: List[str] = []
-
-        # Create Serial list with device links
-        for device in self.sys_hardware.serial_devices:
-            serial.append(device.path.as_posix())
-            for link in device.links:
-                serial.append(link.as_posix())
-
         return {
-            ATTR_SERIAL: serial,
-            ATTR_INPUT: list(self.sys_hardware.input_devices),
-            ATTR_DISK: [
-                device.path.as_posix() for device in self.sys_hardware.disk_devices
-            ],
-            ATTR_GPIO: list(self.sys_hardware.gpio_devices),
-            ATTR_USB: [
-                device.path.as_posix() for device in self.sys_hardware.usb_devices
-            ],
-            ATTR_AUDIO: self.sys_hardware.audio_devices,
+            ATTR_DEVICES: [
+                device_struct(device) for device in self.sys_hardware.devices
+            ]
         }

     @api_process
-    async def audio(self, request: web.Request) -> Dict[str, Any]:
+    async def audio(self, request: web.Request) -> dict[str, Any]:
         """Show pulse audio profiles."""
         return {
             ATTR_AUDIO: {
@@ -62,8 +58,3 @@ class APIHardware(CoreSysAttributes):
             },
         }
     }
-
-    @api_process
-    def trigger(self, request: web.Request) -> Awaitable[None]:
-        """Trigger a udev device reload."""
-        return asyncio.shield(self.sys_hardware.udev_trigger())
@@ -1,7 +1,7 @@
 """Init file for Supervisor Home Assistant RESTful API."""
 import asyncio
 import logging
-from typing import Any, Awaitable, Dict
+from typing import Any, Awaitable

 from aiohttp import web
 import voluptuous as vol
@@ -10,6 +10,7 @@ from ..const import (
     ATTR_ARCH,
     ATTR_AUDIO_INPUT,
     ATTR_AUDIO_OUTPUT,
+    ATTR_BACKUP,
     ATTR_BLK_READ,
     ATTR_BLK_WRITE,
     ATTR_BOOT,
@@ -43,25 +44,30 @@ _LOGGER: logging.Logger = logging.getLogger(__name__)
 SCHEMA_OPTIONS = vol.Schema(
     {
         vol.Optional(ATTR_BOOT): vol.Boolean(),
-        vol.Optional(ATTR_IMAGE): docker_image,
+        vol.Optional(ATTR_IMAGE): vol.Maybe(docker_image),
         vol.Optional(ATTR_PORT): network_port,
         vol.Optional(ATTR_SSL): vol.Boolean(),
         vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
         vol.Optional(ATTR_WAIT_BOOT): vol.All(vol.Coerce(int), vol.Range(min=60)),
-        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
-        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
-        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
+        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(str),
+        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(str),
+        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(str),
     }
 )

-SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
+SCHEMA_UPDATE = vol.Schema(
+    {
+        vol.Optional(ATTR_VERSION): version_tag,
+        vol.Optional(ATTR_BACKUP): bool,
+    }
+)


 class APIHomeAssistant(CoreSysAttributes):
     """Handle RESTful API for Home Assistant functions."""

     @api_process
-    async def info(self, request: web.Request) -> Dict[str, Any]:
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Return host information."""
         return {
             ATTR_VERSION: self.sys_homeassistant.version,
@@ -117,7 +123,7 @@ class APIHomeAssistant(CoreSysAttributes):
         self.sys_homeassistant.save_data()

     @api_process
-    async def stats(self, request: web.Request) -> Dict[Any, str]:
+    async def stats(self, request: web.Request) -> dict[Any, str]:
         """Return resource information."""
         stats = await self.sys_homeassistant.core.stats()
         if not stats:
@@ -137,10 +143,14 @@ class APIHomeAssistant(CoreSysAttributes):
     @api_process
     async def update(self, request: web.Request) -> None:
         """Update Home Assistant."""
-        body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.sys_homeassistant.latest_version)
+        body = await api_validate(SCHEMA_UPDATE, request)

-        await asyncio.shield(self.sys_homeassistant.core.update(version))
+        await asyncio.shield(
+            self.sys_homeassistant.core.update(
+                version=body.get(ATTR_VERSION, self.sys_homeassistant.latest_version),
+                backup=body.get(ATTR_BACKUP),
+            )
+        )

     @api_process
     def stop(self, request: web.Request) -> Awaitable[None]:
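The update handler now validates an optional backup flag alongside the version, so a pre-update backup can be requested in the same call. A sketch of the new request body accepted by SCHEMA_UPDATE; the version value is illustrative:

import json

# POST /core/update — both keys are optional; omitting "version"
# falls back to self.sys_homeassistant.latest_version.
body = {
    "version": "2021.6.0",  # illustrative version tag
    "backup": True,         # create a backup before updating
}
print(json.dumps(body))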
@@ -11,6 +11,7 @@ from ..const import (
     ATTR_DEPLOYMENT,
     ATTR_DESCRIPTON,
     ATTR_DISK_FREE,
+    ATTR_DISK_LIFE_TIME,
     ATTR_DISK_TOTAL,
     ATTR_DISK_USED,
     ATTR_FEATURES,
@@ -20,14 +21,25 @@ from ..const import (
     ATTR_OPERATING_SYSTEM,
     ATTR_SERVICES,
     ATTR_STATE,
+    ATTR_TIMEZONE,
     CONTENT_TYPE_BINARY,
 )
 from ..coresys import CoreSysAttributes
+from .const import (
+    ATTR_AGENT_VERSION,
+    ATTR_APPARMOR_VERSION,
+    ATTR_BOOT_TIMESTAMP,
+    ATTR_DT_SYNCHRONIZED,
+    ATTR_DT_UTC,
+    ATTR_STARTUP_TIME,
+    ATTR_USE_NTP,
+    ATTR_USE_RTC,
+)
 from .utils import api_process, api_process_raw, api_validate

 SERVICE = "service"

-SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): vol.Coerce(str)})
+SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): str})


 class APIHost(CoreSysAttributes):
@@ -37,16 +49,26 @@ class APIHost(CoreSysAttributes):
     async def info(self, request):
         """Return host information."""
         return {
+            ATTR_AGENT_VERSION: self.sys_dbus.agent.version,
+            ATTR_APPARMOR_VERSION: self.sys_host.apparmor.version,
             ATTR_CHASSIS: self.sys_host.info.chassis,
             ATTR_CPE: self.sys_host.info.cpe,
             ATTR_DEPLOYMENT: self.sys_host.info.deployment,
             ATTR_DISK_FREE: self.sys_host.info.free_space,
             ATTR_DISK_TOTAL: self.sys_host.info.total_space,
             ATTR_DISK_USED: self.sys_host.info.used_space,
+            ATTR_DISK_LIFE_TIME: self.sys_host.info.disk_life_time,
             ATTR_FEATURES: self.sys_host.features,
             ATTR_HOSTNAME: self.sys_host.info.hostname,
             ATTR_KERNEL: self.sys_host.info.kernel,
             ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
+            ATTR_TIMEZONE: self.sys_host.info.timezone,
+            ATTR_DT_UTC: self.sys_host.info.dt_utc,
+            ATTR_DT_SYNCHRONIZED: self.sys_host.info.dt_synchronized,
+            ATTR_USE_NTP: self.sys_host.info.use_ntp,
+            ATTR_USE_RTC: self.sys_host.info.use_rtc,
+            ATTR_STARTUP_TIME: self.sys_host.info.startup_time,
+            ATTR_BOOT_TIMESTAMP: self.sys_host.info.boot_timestamp,
         }

     @api_process
@@ -1,6 +1,6 @@
 """Init file for Supervisor info RESTful API."""
 import logging
-from typing import Any, Dict
+from typing import Any

 from aiohttp import web

@@ -15,6 +15,7 @@ from ..const import (
     ATTR_LOGGING,
     ATTR_MACHINE,
     ATTR_OPERATING_SYSTEM,
+    ATTR_STATE,
     ATTR_SUPERVISOR,
     ATTR_SUPPORTED,
     ATTR_SUPPORTED_ARCH,
@@ -30,21 +31,22 @@ class APIInfo(CoreSysAttributes):
     """Handle RESTful API for info functions."""

     @api_process
-    async def info(self, request: web.Request) -> Dict[str, Any]:
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Show system info."""
         return {
             ATTR_SUPERVISOR: self.sys_supervisor.version,
             ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
-            ATTR_HASSOS: self.sys_hassos.version,
+            ATTR_HASSOS: self.sys_os.version,
             ATTR_DOCKER: self.sys_docker.info.version,
             ATTR_HOSTNAME: self.sys_host.info.hostname,
             ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
             ATTR_FEATURES: self.sys_host.features,
             ATTR_MACHINE: self.sys_machine,
             ATTR_ARCH: self.sys_arch.default,
+            ATTR_STATE: self.sys_core.state,
             ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
             ATTR_SUPPORTED: self.sys_core.supported,
             ATTR_CHANNEL: self.sys_updater.channel,
             ATTR_LOGGING: self.sys_config.logging,
-            ATTR_TIMEZONE: self.sys_config.timezone,
+            ATTR_TIMEZONE: self.sys_timezone,
         }
@@ -2,10 +2,10 @@
 import asyncio
 from ipaddress import ip_address
 import logging
-from typing import Any, Dict, Union
+from typing import Any, Union

 import aiohttp
-from aiohttp import hdrs, web
+from aiohttp import ClientTimeout, hdrs, web
 from aiohttp.web_exceptions import (
     HTTPBadGateway,
     HTTPServiceUnavailable,
@@ -25,10 +25,9 @@ from ..const import (
     COOKIE_INGRESS,
     HEADER_TOKEN,
     HEADER_TOKEN_OLD,
-    REQUEST_FROM,
 )
 from ..coresys import CoreSysAttributes
-from .utils import api_process, api_validate
+from .utils import api_process, api_validate, require_home_assistant

 _LOGGER: logging.Logger = logging.getLogger(__name__)

@@ -50,17 +49,12 @@ class APIIngress(CoreSysAttributes):

         return addon

-    def _check_ha_access(self, request: web.Request) -> None:
-        if request[REQUEST_FROM] != self.sys_homeassistant:
-            _LOGGER.warning("Ingress is only available behind Home Assistant")
-            raise HTTPUnauthorized()
-
     def _create_url(self, addon: Addon, path: str) -> str:
         """Create URL to container."""
         return f"http://{addon.ip_address}:{addon.ingress_port}/{path}"

     @api_process
-    async def panels(self, request: web.Request) -> Dict[str, Any]:
+    async def panels(self, request: web.Request) -> dict[str, Any]:
         """Create a list of panel data."""
         addons = {}
         for addon in self.sys_ingress.addons:
@@ -74,18 +68,16 @@ class APIIngress(CoreSysAttributes):
         return {ATTR_PANELS: addons}

     @api_process
-    async def create_session(self, request: web.Request) -> Dict[str, Any]:
+    @require_home_assistant
+    async def create_session(self, request: web.Request) -> dict[str, Any]:
         """Create a new session."""
-        self._check_ha_access(request)
-
         session = self.sys_ingress.create_session()
         return {ATTR_SESSION: session}

     @api_process
-    async def validate_session(self, request: web.Request) -> Dict[str, Any]:
+    @require_home_assistant
+    async def validate_session(self, request: web.Request) -> dict[str, Any]:
         """Validate a session and extend how long it stays valid."""
-        self._check_ha_access(request)
-
         data = await api_validate(VALIDATE_SESSION_DATA, request)

         # Check Ingress Session
@@ -93,11 +85,11 @@ class APIIngress(CoreSysAttributes):
             _LOGGER.warning("No valid ingress session %s", data[ATTR_SESSION])
             raise HTTPUnauthorized()

+    @require_home_assistant
     async def handler(
         self, request: web.Request
     ) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
         """Route data to Supervisor ingress service."""
-        self._check_ha_access(request)
-
         # Check Ingress Session
         session = request.cookies.get(COOKIE_INGRESS)
@@ -170,9 +162,18 @@ class APIIngress(CoreSysAttributes):
     ) -> Union[web.Response, web.StreamResponse]:
         """Ingress route for request."""
         url = self._create_url(addon, path)
-        data = await request.read()
         source_header = _init_header(request, addon)

+        # Passing the raw stream breaks requests for some web servers,
+        # and we only really need it for POST requests. For all other
+        # methods we read the bytes and pass those to the add-on request;
+        # add-ons need to opt in to streaming via their configuration.
+        data = (
+            request.content
+            if request.method == "POST" and addon.ingress_stream
+            else await request.read()
+        )
+
         async with self.sys_websession.request(
             request.method,
             url,
@@ -180,6 +181,7 @@ class APIIngress(CoreSysAttributes):
             params=request.query,
             allow_redirects=False,
             data=data,
+            timeout=ClientTimeout(total=None),
         ) as result:
             headers = _response_header(result)

@@ -218,7 +220,7 @@

 def _init_header(
     request: web.Request, addon: str
-) -> Union[CIMultiDict, Dict[str, str]]:
+) -> Union[CIMultiDict, dict[str, str]]:
     """Create initial header."""
     headers = {}

@@ -227,6 +229,7 @@ def _init_header(
         if name in (
             hdrs.CONTENT_LENGTH,
             hdrs.CONTENT_ENCODING,
+            hdrs.TRANSFER_ENCODING,
             hdrs.SEC_WEBSOCKET_EXTENSIONS,
             hdrs.SEC_WEBSOCKET_PROTOCOL,
             hdrs.SEC_WEBSOCKET_VERSION,
@@ -245,7 +248,7 @@ def _init_header(
     return headers


-def _response_header(response: aiohttp.ClientResponse) -> Dict[str, str]:
+def _response_header(response: aiohttp.ClientResponse) -> dict[str, str]:
     """Create response header."""
     headers = {}
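The ingress change forwards POST bodies as a raw stream only when the add-on opts in via addon.ingress_stream, because some web servers choke on chunked uploads; everything else is still buffered with request.read(). A stripped-down sketch of the same pattern outside the Supervisor codebase; the names and upstream URL are illustrative:

from aiohttp import ClientSession, ClientTimeout, web


async def proxy(request: web.Request, upstream: str, stream_posts: bool) -> web.Response:
    """Forward a request, streaming POST bodies only when the target supports it."""
    data = (
        request.content  # raw StreamReader, forwarded without buffering
        if request.method == "POST" and stream_posts
        else await request.read()  # buffered bytes for everything else
    )
    async with ClientSession() as session:
        async with session.request(
            request.method,
            upstream,
            data=data,
            allow_redirects=False,
            timeout=ClientTimeout(total=None),  # long-lived ingress connections
        ) as result:
            return web.Response(status=result.status, body=await result.read())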
@@ -1,6 +1,6 @@
 """Init file for Supervisor Jobs RESTful API."""
 import logging
-from typing import Any, Dict
+from typing import Any

 from aiohttp import web
 import voluptuous as vol
@@ -20,7 +20,7 @@ class APIJobs(CoreSysAttributes):
     """Handle RESTful API for OS functions."""

     @api_process
-    async def info(self, request: web.Request) -> Dict[str, Any]:
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Return JobManager information."""
         return {
             ATTR_IGNORE_CONDITIONS: self.sys_jobs.ignore_conditions,
@@ -36,6 +36,8 @@ class APIJobs(CoreSysAttributes):

         self.sys_jobs.save_data()

+        await self.sys_resolution.evaluate.evaluate_system()
+
     @api_process
     async def reset(self, request: web.Request) -> None:
         """Reset options for JobManager."""
1  supervisor/api/middleware/__init__.py  Normal file
@@ -0,0 +1 @@
"""API middleware for aiohttp."""
208  supervisor/api/middleware/security.py  Normal file
@@ -0,0 +1,208 @@
"""Handle security part of this API."""
import logging
import re

from aiohttp.web import Request, RequestHandler, Response, middleware
from aiohttp.web_exceptions import HTTPForbidden, HTTPUnauthorized

from ...const import (
    REQUEST_FROM,
    ROLE_ADMIN,
    ROLE_BACKUP,
    ROLE_DEFAULT,
    ROLE_HOMEASSISTANT,
    ROLE_MANAGER,
    CoreState,
)
from ...coresys import CoreSys, CoreSysAttributes
from ..utils import api_return_error, excract_supervisor_token

_LOGGER: logging.Logger = logging.getLogger(__name__)

# fmt: off

# Block anytime
BLACKLIST = re.compile(
    r"^(?:"
    r"|/homeassistant/api/hassio/.*"
    r"|/core/api/hassio/.*"
    r")$"
)

# Free to call or have own security concepts
NO_SECURITY_CHECK = re.compile(
    r"^(?:"
    r"|/homeassistant/api/.*"
    r"|/homeassistant/websocket"
    r"|/core/api/.*"
    r"|/core/websocket"
    r"|/supervisor/ping"
    r")$"
)

# API calls allowed for the Observer
OBSERVER_CHECK = re.compile(
    r"^(?:"
    r"|/.+/info"
    r")$"
)

# Can be called by every add-on
ADDONS_API_BYPASS = re.compile(
    r"^(?:"
    r"|/addons/self/(?!security|update)[^/]+"
    r"|/addons/self/options/config"
    r"|/info"
    r"|/services.*"
    r"|/discovery.*"
    r"|/auth"
    r")$"
)

# Policy role add-on API access
ADDONS_ROLE_ACCESS = {
    ROLE_DEFAULT: re.compile(
        r"^(?:"
        r"|/.+/info"
        r")$"
    ),
    ROLE_HOMEASSISTANT: re.compile(
        r"^(?:"
        r"|/.+/info"
        r"|/core/.+"
        r"|/homeassistant/.+"
        r")$"
    ),
    ROLE_BACKUP: re.compile(
        r"^(?:"
        r"|/.+/info"
        r"|/backups.*"
        r"|/snapshots.*"
        r")$"
    ),
    ROLE_MANAGER: re.compile(
        r"^(?:"
        r"|/.+/info"
        r"|/addons(?:/[^/]+/(?!security).+|/reload)?"
        r"|/audio/.+"
        r"|/auth/cache"
        r"|/cli/.+"
        r"|/core/.+"
        r"|/dns/.+"
        r"|/docker/.+"
        r"|/jobs/.+"
        r"|/hardware/.+"
        r"|/hassos/.+"
        r"|/homeassistant/.+"
        r"|/host/.+"
        r"|/multicast/.+"
        r"|/network/.+"
        r"|/observer/.+"
        r"|/os/.+"
        r"|/resolution/.+"
        r"|/backups.*"
        r"|/snapshots.*"
        r"|/store.*"
        r"|/supervisor/.+"
        r"|/security/.+"
        r")$"
    ),
    ROLE_ADMIN: re.compile(
        r".*"
    ),
}

# fmt: on


class SecurityMiddleware(CoreSysAttributes):
    """Security middleware functions."""

    def __init__(self, coresys: CoreSys):
        """Initialize security middleware."""
        self.coresys: CoreSys = coresys

    @middleware
    async def system_validation(
        self, request: Request, handler: RequestHandler
    ) -> Response:
        """Check if core is ready to respond."""
        if self.sys_core.state not in (
            CoreState.STARTUP,
            CoreState.RUNNING,
            CoreState.FREEZE,
        ):
            return api_return_error(
                message=f"System is not ready with state: {self.sys_core.state.value}"
            )

        return await handler(request)

    @middleware
    async def token_validation(
        self, request: Request, handler: RequestHandler
    ) -> Response:
        """Check security access of this layer."""
        request_from = None
        supervisor_token = excract_supervisor_token(request)

        # Blacklist
        if BLACKLIST.match(request.path):
            _LOGGER.error("%s is blacklisted!", request.path)
            raise HTTPForbidden()

        # Ignore security check
        if NO_SECURITY_CHECK.match(request.path):
            _LOGGER.debug("Passthrough %s", request.path)
            return await handler(request)

        # No token
        if not supervisor_token:
            _LOGGER.warning("No API token provided for %s", request.path)
            raise HTTPUnauthorized()

        # Home Assistant
        if supervisor_token == self.sys_homeassistant.supervisor_token:
            _LOGGER.debug("%s access from Home Assistant", request.path)
            request_from = self.sys_homeassistant

        # Host
        if supervisor_token == self.sys_plugins.cli.supervisor_token:
            _LOGGER.debug("%s access from Host", request.path)
            request_from = self.sys_host

        # Observer
        if supervisor_token == self.sys_plugins.observer.supervisor_token:
            if not OBSERVER_CHECK.match(request.path):
                _LOGGER.warning("%s invalid Observer access", request.path)
                raise HTTPForbidden()
            _LOGGER.debug("%s access from Observer", request.path)
            request_from = self.sys_plugins.observer

        # Add-on
        addon = None
        if supervisor_token and not request_from:
            addon = self.sys_addons.from_token(supervisor_token)

        # Check Add-on API access
        if addon and ADDONS_API_BYPASS.match(request.path):
            _LOGGER.debug("Passthrough %s from %s", request.path, addon.slug)
            request_from = addon
        elif addon and addon.access_hassio_api:
            # Check Role
            if ADDONS_ROLE_ACCESS[addon.hassio_role].match(request.path):
                _LOGGER.info("%s access from %s", request.path, addon.slug)
                request_from = addon
            else:
                _LOGGER.warning("%s no role for %s", request.path, addon.slug)
        elif addon:
            _LOGGER.warning(
                "%s missing API permission for %s", addon.slug, request.path
            )

        if request_from:
            request[REQUEST_FROM] = request_from
            return await handler(request)

        _LOGGER.error("Invalid token for access %s", request.path)
        raise HTTPForbidden()
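The role policy above is pure regex matching on the request path, which makes it easy to sanity-check in isolation. A small sketch; the role pattern is inlined rather than imported, as an assumption, to keep it self-contained:

import re

# Same shape as ADDONS_ROLE_ACCESS above, reduced to the "backup" role.
ROLE_BACKUP_ACCESS = re.compile(
    r"^(?:"
    r"|/.+/info"
    r"|/backups.*"
    r"|/snapshots.*"
    r")$"
)

for path, expected in [
    ("/backups/new/full", True),   # backup role may manage backups
    ("/core/info", True),          # every role may read */info
    ("/core/update", False),       # manager/admin only
]:
    assert bool(ROLE_BACKUP_ACCESS.match(path)) is expected, path
print("role policy behaves as expected")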
@@ -1,7 +1,7 @@
 """Init file for Supervisor Multicast RESTful API."""
 import asyncio
 import logging
-from typing import Any, Awaitable, Dict
+from typing import Any, Awaitable

 from aiohttp import web
 import voluptuous as vol
@@ -34,7 +34,7 @@ class APIMulticast(CoreSysAttributes):
     """Handle RESTful API for Multicast functions."""

     @api_process
-    async def info(self, request: web.Request) -> Dict[str, Any]:
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Return Multicast information."""
         return {
             ATTR_VERSION: self.sys_plugins.multicast.version,
@@ -43,7 +43,7 @@ class APIMulticast(CoreSysAttributes):
         }

     @api_process
-    async def stats(self, request: web.Request) -> Dict[str, Any]:
+    async def stats(self, request: web.Request) -> dict[str, Any]:
         """Return resource information."""
         stats = await self.sys_plugins.multicast.stats()
@@ -1,7 +1,7 @@
 """REST API for network."""
 import asyncio
 from ipaddress import ip_address, ip_interface
-from typing import Any, Awaitable, Dict
+from typing import Any, Awaitable

 from aiohttp import web
 import attr
@@ -18,6 +18,7 @@ from ..const import (
     ATTR_FREQUENCY,
     ATTR_GATEWAY,
     ATTR_HOST_INTERNET,
+    ATTR_ID,
     ATTR_INTERFACE,
     ATTR_INTERFACES,
     ATTR_IPV4,
@@ -26,6 +27,7 @@ from ..const import (
     ATTR_METHOD,
     ATTR_MODE,
     ATTR_NAMESERVERS,
+    ATTR_PARENT,
     ATTR_PRIMARY,
     ATTR_PSK,
     ATTR_SIGNAL,
@@ -80,7 +82,7 @@ SCHEMA_UPDATE = vol.Schema(
 )


-def ipconfig_struct(config: IpConfig) -> dict:
+def ipconfig_struct(config: IpConfig) -> dict[str, Any]:
     """Return a dict with information about ip configuration."""
     return {
         ATTR_METHOD: config.method,
@@ -90,7 +92,7 @@ def ipconfig_struct(config: IpConfig) -> dict:
     }


-def wifi_struct(config: WifiConfig) -> dict:
+def wifi_struct(config: WifiConfig) -> dict[str, Any]:
     """Return a dict with information about wifi configuration."""
     return {
         ATTR_MODE: config.mode,
@@ -100,7 +102,15 @@ def wifi_struct(config: WifiConfig) -> dict:
     }


-def interface_struct(interface: Interface) -> dict:
+def vlan_struct(config: VlanConfig) -> dict[str, Any]:
+    """Return a dict with information about VLAN configuration."""
+    return {
+        ATTR_ID: config.id,
+        ATTR_PARENT: config.interface,
+    }
+
+
+def interface_struct(interface: Interface) -> dict[str, Any]:
     """Return a dict with information of an interface to be used in the API."""
     return {
         ATTR_INTERFACE: interface.name,
@@ -111,11 +121,11 @@ def interface_struct(interface: Interface) -> dict:
         ATTR_IPV4: ipconfig_struct(interface.ipv4) if interface.ipv4 else None,
         ATTR_IPV6: ipconfig_struct(interface.ipv6) if interface.ipv6 else None,
         ATTR_WIFI: wifi_struct(interface.wifi) if interface.wifi else None,
-        ATTR_VLAN: wifi_struct(interface.vlan) if interface.vlan else None,
+        ATTR_VLAN: vlan_struct(interface.vlan) if interface.vlan else None,
     }


-def accesspoint_struct(accesspoint: AccessPoint) -> dict:
+def accesspoint_struct(accesspoint: AccessPoint) -> dict[str, Any]:
     """Return a dict for AccessPoint."""
     return {
         ATTR_MODE: accesspoint.mode,
@@ -145,10 +155,10 @@ class APINetwork(CoreSysAttributes):
         except HostNetworkNotFound:
             pass

-        raise APIError(f"Interface {name} does not exsist") from None
+        raise APIError(f"Interface {name} does not exist") from None

     @api_process
-    async def info(self, request: web.Request) -> Dict[str, Any]:
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Return network information."""
         return {
             ATTR_INTERFACES: [
@@ -166,7 +176,7 @@ class APINetwork(CoreSysAttributes):
         }

     @api_process
-    async def interface_info(self, request: web.Request) -> Dict[str, Any]:
+    async def interface_info(self, request: web.Request) -> dict[str, Any]:
         """Return network information for an interface."""
         interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))

@@ -213,7 +223,7 @@ class APINetwork(CoreSysAttributes):
         return asyncio.shield(self.sys_host.network.update())

     @api_process
-    async def scan_accesspoints(self, request: web.Request) -> Dict[str, Any]:
+    async def scan_accesspoints(self, request: web.Request) -> dict[str, Any]:
         """Scan and return a list of available networks."""
         interface = self._get_interface(request.match_info.get(ATTR_INTERFACE))
@@ -1,7 +1,7 @@
 """Init file for Supervisor Observer RESTful API."""
 import asyncio
 import logging
-from typing import Any, Dict
+from typing import Any

 from aiohttp import web
 import voluptuous as vol
@@ -33,7 +33,7 @@ class APIObserver(CoreSysAttributes):
     """Handle RESTful API for Observer functions."""

     @api_process
-    async def info(self, request: web.Request) -> Dict[str, Any]:
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Return HA Observer information."""
         return {
             ATTR_HOST: str(self.sys_docker.network.observer),
@@ -43,7 +43,7 @@ class APIObserver(CoreSysAttributes):
         }

     @api_process
-    async def stats(self, request: web.Request) -> Dict[str, Any]:
+    async def stats(self, request: web.Request) -> dict[str, Any]:
         """Return resource information."""
         stats = await self.sys_plugins.observer.stats()
@@ -1,7 +1,8 @@
 """Init file for Supervisor HassOS RESTful API."""
 import asyncio
 import logging
-from typing import Any, Awaitable, Dict
+from pathlib import Path
+from typing import Any, Awaitable

 from aiohttp import web
 import voluptuous as vol
@@ -9,42 +10,60 @@ import voluptuous as vol
 from ..const import (
     ATTR_BOARD,
     ATTR_BOOT,
+    ATTR_DEVICES,
     ATTR_UPDATE_AVAILABLE,
     ATTR_VERSION,
     ATTR_VERSION_LATEST,
 )
 from ..coresys import CoreSysAttributes
 from ..validate import version_tag
+from .const import ATTR_DATA_DISK, ATTR_DEVICE
 from .utils import api_process, api_validate

 _LOGGER: logging.Logger = logging.getLogger(__name__)

 SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): version_tag})
+SCHEMA_DISK = vol.Schema({vol.Required(ATTR_DEVICE): vol.All(str, vol.Coerce(Path))})


 class APIOS(CoreSysAttributes):
     """Handle RESTful API for OS functions."""

     @api_process
-    async def info(self, request: web.Request) -> Dict[str, Any]:
+    async def info(self, request: web.Request) -> dict[str, Any]:
         """Return OS information."""
         return {
-            ATTR_VERSION: self.sys_hassos.version,
-            ATTR_VERSION_LATEST: self.sys_hassos.latest_version,
-            ATTR_UPDATE_AVAILABLE: self.sys_hassos.need_update,
-            ATTR_BOARD: self.sys_hassos.board,
+            ATTR_VERSION: self.sys_os.version,
+            ATTR_VERSION_LATEST: self.sys_os.latest_version,
+            ATTR_UPDATE_AVAILABLE: self.sys_os.need_update,
+            ATTR_BOARD: self.sys_os.board,
             ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
+            ATTR_DATA_DISK: self.sys_os.datadisk.disk_used,
         }

     @api_process
     async def update(self, request: web.Request) -> None:
         """Update OS."""
         body = await api_validate(SCHEMA_VERSION, request)
-        version = body.get(ATTR_VERSION, self.sys_hassos.latest_version)
+        version = body.get(ATTR_VERSION, self.sys_os.latest_version)

-        await asyncio.shield(self.sys_hassos.update(version))
+        await asyncio.shield(self.sys_os.update(version))

     @api_process
     def config_sync(self, request: web.Request) -> Awaitable[None]:
         """Trigger config reload on OS."""
-        return asyncio.shield(self.sys_hassos.config_sync())
+        return asyncio.shield(self.sys_os.config_sync())
+
+    @api_process
+    async def migrate_data(self, request: web.Request) -> None:
+        """Trigger data disk migration on Host."""
+        body = await api_validate(SCHEMA_DISK, request)
+
+        await asyncio.shield(self.sys_os.datadisk.migrate_disk(body[ATTR_DEVICE]))
+
+    @api_process
+    async def list_data(self, request: web.Request) -> dict[str, Any]:
+        """Return possible data targets."""
+        return {
+            ATTR_DEVICES: self.sys_os.datadisk.available_disks,
+        }
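The new datadisk handlers pair naturally: list the candidate disks, then trigger a migration. A hedged sketch of the two calls; the base URL, endpoint paths, and token handling are assumptions for illustration:

import os

import requests

SUPERVISOR = "http://supervisor"  # assumed in-container base URL
HEADERS = {"Authorization": f"Bearer {os.environ.get('SUPERVISOR_TOKEN', '')}"}

# list_data() -> {"devices": [...]} with possible migration targets
devices = requests.get(
    f"{SUPERVISOR}/os/datadisk/list", headers=HEADERS
).json()["data"]["devices"]

if devices:
    # migrate_data() validates {"device": <path>} via SCHEMA_DISK
    # and triggers the migration to the chosen disk.
    requests.post(
        f"{SUPERVISOR}/os/datadisk/move",
        json={"device": devices[0]},
        headers=HEADERS,
    )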
@@ -1,9 +1,16 @@
-try {
-  new Function("import('/api/hassio/app/frontend_latest/entrypoint.df099659.js')")();
-} catch (err) {
-  function loadES5() {
-    var el = document.createElement('script');
-    el.src = '/api/hassio/app/frontend_es5/entrypoint.d303236e.js';
-    document.body.appendChild(el);
-  }
+function loadES5() {
+  var el = document.createElement('script');
+  el.src = '/api/hassio/app/frontend_es5/entrypoint.5d40ff8b.js';
+  document.body.appendChild(el);
+}
+if (/.*Version\/(?:11|12)(?:\.\d+)*.*Safari\//.test(navigator.userAgent)) {
+  loadES5();
+} else {
+  try {
+    new Function("import('/api/hassio/app/frontend_latest/entrypoint.f09e9f8e.js')")();
+  } catch (err) {
+    loadES5();
+  }
+}
Binary file not shown.
1  supervisor/api/panel/frontend_es5/12dbe34e.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/12dbe34e.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/2dbdaab4.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/2dbdaab4.js.gz  Normal file
Binary file not shown.
2  supervisor/api/panel/frontend_es5/33b00c5f.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/33b00c5f.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/39b0c1a9.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/39b0c1a9.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/3a2e08bf.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/3a2e08bf.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/450f1129.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/450f1129.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/4a274bef.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/4a274bef.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/64ef8d49.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/64ef8d49.js.gz  Normal file
Binary file not shown.
2  supervisor/api/panel/frontend_es5/69e58998.js  Normal file
@@ -0,0 +1 @@
/*! js-yaml 4.1.0 https://github.com/nodeca/js-yaml @license MIT */
BIN  supervisor/api/panel/frontend_es5/69e58998.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/6b0926eb.js  Normal file
@@ -0,0 +1 @@
!function(){"use strict";var r,t,n={5425:function(r,t,n){var e=n(93217);n(58556);function o(r,t){return function(r){if(Array.isArray(r))return r}(r)||function(r,t){var n=null==r?null:"undefined"!=typeof Symbol&&r[Symbol.iterator]||r["@@iterator"];if(null==n)return;var e,o,u=[],i=!0,a=!1;try{for(n=n.call(r);!(i=(e=n.next()).done)&&(u.push(e.value),!t||u.length!==t);i=!0);}catch(f){a=!0,o=f}finally{try{i||null==n.return||n.return()}finally{if(a)throw o}}return u}(r,t)||function(r,t){if(!r)return;if("string"==typeof r)return u(r,t);var n=Object.prototype.toString.call(r).slice(8,-1);"Object"===n&&r.constructor&&(n=r.constructor.name);if("Map"===n||"Set"===n)return Array.from(r);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return u(r,t)}(r,t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function u(r,t){(null==t||t>r.length)&&(t=r.length);for(var n=0,e=new Array(t);n<t;n++)e[n]=r[n];return e}var i={filterData:function(r,t,n){return n=n.toUpperCase(),r.filter((function(r){return Object.entries(t).some((function(t){var e=o(t,2),u=e[0],i=e[1];return!(!i.filterable||!String(i.filterKey?r[i.valueColumn||u][i.filterKey]:r[i.valueColumn||u]).toUpperCase().includes(n))}))}))},sortData:function(r,t,n,e){return r.sort((function(r,o){var u=1;"desc"===n&&(u=-1);var i=t.filterKey?r[t.valueColumn||e][t.filterKey]:r[t.valueColumn||e],a=t.filterKey?o[t.valueColumn||e][t.filterKey]:o[t.valueColumn||e];return"string"==typeof i&&(i=i.toUpperCase()),"string"==typeof a&&(a=a.toUpperCase()),void 0===i&&void 0!==a?1:void 0===a&&void 0!==i?-1:i<a?-1*u:i>a?1*u:0}))}};(0,e.Jj)(i)}},e={};function o(r){var t=e[r];if(void 0!==t)return t.exports;var u=e[r]={exports:{}};return n[r](u,u.exports,o),u.exports}o.m=n,o.x=function(){var r=o.O(void 0,[191],(function(){return o(5425)}));return r=o.O(r)},r=[],o.O=function(t,n,e,u){if(!n){var i=1/0;for(c=0;c<r.length;c++){n=r[c][0],e=r[c][1],u=r[c][2];for(var a=!0,f=0;f<n.length;f++)(!1&u||i>=u)&&Object.keys(o.O).every((function(r){return o.O[r](n[f])}))?n.splice(f--,1):(a=!1,u<i&&(i=u));if(a){r.splice(c--,1);var l=e();void 0!==l&&(t=l)}}return t}u=u||0;for(var c=r.length;c>0&&r[c-1][2]>u;c--)r[c]=r[c-1];r[c]=[n,e,u]},o.n=function(r){var t=r&&r.__esModule?function(){return r.default}:function(){return r};return o.d(t,{a:t}),t},o.d=function(r,t){for(var n in t)o.o(t,n)&&!o.o(r,n)&&Object.defineProperty(r,n,{enumerable:!0,get:t[n]})},o.f={},o.e=function(r){return Promise.all(Object.keys(o.f).reduce((function(t,n){return o.f[n](r,t),t}),[]))},o.u=function(r){return"2dbdaab4.js"},o.o=function(r,t){return Object.prototype.hasOwnProperty.call(r,t)},o.p="/api/hassio/app/frontend_es5/",function(){var r={477:1,425:1};o.f.i=function(t,n){r[t]||importScripts(o.p+o.u(t))};var t=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],n=t.push.bind(t);t.push=function(t){var e=t[0],u=t[1],i=t[2];for(var a in u)o.o(u,a)&&(o.m[a]=u[a]);for(i&&i(o);e.length;)r[e.pop()]=1;n(t)}}(),t=o.x,o.x=function(){return o.e(191).then(t)};o.x()}();
BIN  supervisor/api/panel/frontend_es5/6b0926eb.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/6d91c010.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/6d91c010.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/72c04451.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/72c04451.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/75af0819.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/75af0819.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/8066b1de.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/8066b1de.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/94873099.js  Normal file
@@ -0,0 +1 @@
!function(){"use strict";var n,t,e={14971:function(n,t,e){var r,o,i=e(93217),u=e(9902),a=e.n(u),f=(e(58556),e(62173)),c=function(n,t,e){if("input"===n){if("type"===t&&"checkbox"===e||"checked"===t||"disabled"===t)return;return""}},s={renderMarkdown:function(n,t){var e,i=arguments.length>2&&void 0!==arguments[2]?arguments[2]:{};return r||(r=Object.assign({},(0,f.getDefaultWhiteList)(),{input:["type","disabled","checked"],"ha-icon":["icon"],"ha-svg-icon":["path"]})),i.allowSvg?(o||(o=Object.assign({},r,{svg:["xmlns","height","width"],path:["transform","stroke","d"],img:["src"]})),e=o):e=r,(0,f.filterXSS)(a()(n,t),{whiteList:e,onTagAttr:c})}};(0,i.Jj)(s)}},r={};function o(n){var t=r[n];if(void 0!==t)return t.exports;var i=r[n]={exports:{}};return e[n].call(i.exports,i,i.exports,o),i.exports}o.m=e,o.x=function(){var n=o.O(void 0,[191,468],(function(){return o(14971)}));return n=o.O(n)},n=[],o.O=function(t,e,r,i){if(!e){var u=1/0;for(s=0;s<n.length;s++){e=n[s][0],r=n[s][1],i=n[s][2];for(var a=!0,f=0;f<e.length;f++)(!1&i||u>=i)&&Object.keys(o.O).every((function(n){return o.O[n](e[f])}))?e.splice(f--,1):(a=!1,i<u&&(u=i));if(a){n.splice(s--,1);var c=r();void 0!==c&&(t=c)}}return t}i=i||0;for(var s=n.length;s>0&&n[s-1][2]>i;s--)n[s]=n[s-1];n[s]=[e,r,i]},o.n=function(n){var t=n&&n.__esModule?function(){return n.default}:function(){return n};return o.d(t,{a:t}),t},o.d=function(n,t){for(var e in t)o.o(t,e)&&!o.o(n,e)&&Object.defineProperty(n,e,{enumerable:!0,get:t[e]})},o.f={},o.e=function(n){return Promise.all(Object.keys(o.f).reduce((function(t,e){return o.f[e](n,t),t}),[]))},o.u=function(n){return{191:"2dbdaab4",468:"4a274bef"}[n]+".js"},o.o=function(n,t){return Object.prototype.hasOwnProperty.call(n,t)},o.p="/api/hassio/app/frontend_es5/",function(){var n={971:1};o.f.i=function(t,e){n[t]||importScripts(o.p+o.u(t))};var t=self.webpackChunkhome_assistant_frontend=self.webpackChunkhome_assistant_frontend||[],e=t.push.bind(t);t.push=function(t){var r=t[0],i=t[1],u=t[2];for(var a in i)o.o(i,a)&&(o.m[a]=i[a]);for(u&&u(o);r.length;)n[r.pop()]=1;e(t)}}(),t=o.x,o.x=function(){return Promise.all([o.e(191),o.e(468)]).then(t)};o.x()}();
BIN  supervisor/api/panel/frontend_es5/94873099.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/frontend_es5/a5a3a80a.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/frontend_es5/a5a3a80a.js.gz  Normal file
Binary file not shown.
Some files were not shown because too many files have changed in this diff