Mirror of https://github.com/home-assistant/supervisor.git, synced 2025-08-12 02:29:21 +00:00.
Compare commits
1810 Commits

.devcontainer/Dockerfile (new file, 51 lines)
@@ -0,0 +1,51 @@
FROM python:3.7

WORKDIR /workspaces

# Install Node/Yarn for Frontent
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    git \
    apt-utils \
    apt-transport-https \
    && curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
    && echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
    && apt-get update && apt-get install -y --no-install-recommends \
    nodejs \
    yarn \
    && curl -o - https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
    && rm -rf /var/lib/apt/lists/*
ENV NVM_DIR /root/.nvm

# Install docker
# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
RUN apt-get update && apt-get install -y --no-install-recommends \
    apt-transport-https \
    ca-certificates \
    curl \
    software-properties-common \
    gpg-agent \
    && curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
    && add-apt-repository "deb https://download.docker.com/linux/debian $(lsb_release -cs) stable" \
    && apt-get update && apt-get install -y --no-install-recommends \
    docker-ce \
    docker-ce-cli \
    containerd.io \
    && rm -rf /var/lib/apt/lists/*

# Install tools
RUN apt-get update && apt-get install -y --no-install-recommends \
    jq \
    dbus \
    network-manager \
    libpulse0 \
    && rm -rf /var/lib/apt/lists/*

# Install Python dependencies from requirements.txt if it exists
COPY requirements.txt requirements_tests.txt ./
RUN pip3 install -r requirements.txt -r requirements_tests.txt \
    && pip3 install tox \
    && rm -f requirements.txt requirements_tests.txt

# Set the default shell to bash instead of sh
ENV SHELL /bin/bash

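This image is normally built for you by VS Code's Remote Containers extension from `.devcontainer/devcontainer.json`, but it can also be built by hand for inspection. A minimal sketch, assuming it is run from the repository root (the image tag is an arbitrary choice, not taken from this diff):

```sh
# Build the development container image manually. The Dockerfile expects
# requirements.txt and requirements_tests.txt to exist in the build context.
docker build -f .devcontainer/Dockerfile -t supervisor-devcontainer .
```
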
.devcontainer/devcontainer.json (new file, 24 lines)
@@ -0,0 +1,24 @@
// See https://aka.ms/vscode-remote/devcontainer.json for format details.
{
  "name": "Supervisor dev",
  "context": "..",
  "dockerFile": "Dockerfile",
  "appPort": "9123:8123",
  "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
  "extensions": [
    "ms-python.python",
    "visualstudioexptteam.vscodeintellicode",
    "esbenp.prettier-vscode"
  ],
  "settings": {
    "python.pythonPath": "/usr/local/bin/python",
    "python.linting.pylintEnabled": true,
    "python.linting.enabled": true,
    "python.formatting.provider": "black",
    "python.formatting.blackArgs": ["--target-version", "py37"],
    "editor.formatOnPaste": false,
    "editor.formatOnSave": true,
    "editor.formatOnType": true,
    "files.trimTrailingWhitespace": true
  }
}

.dockerignore (new file, 23 lines)
@@ -0,0 +1,23 @@
# General files
.git
.github
.devcontainer
.vscode

# Test related files
.tox

# Temporary files
**/__pycache__
.pytest_cache

# virtualenv
venv/

# Data
home-assistant-polymer/
script/
tests/

# Test ENV
data/

.github/ISSUE_TEMPLATE.md (vendored, new file, 29 lines)
@@ -0,0 +1,29 @@
<!-- READ THIS FIRST:
- If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
- Do not report issues for components here, plaese refer to https://github.com/home-assistant/home-assistant/issues
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
- If you have a problem with a Add-on, make a issue on there repository.
-->

**Home Assistant release with the issue:**
<!--
- Frontend -> Developer tools -> Info
- Or use this command: hass --version
-->

**Operating environment (HassOS/Generic):**
<!--
Please provide details about your environment.
-->

**Supervisor logs:**
<!--
- Frontend -> Hass.io -> System
- Or use this command: hassio su logs
-->


**Description of problem:**

.github/lock.yml (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
# Configuration for Lock Threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 1

# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: 2020-01-01

# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
exemptLabels: []

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false

# Limit to only `issues` or `pulls`
only: pulls

# Optionally, specify configuration settings just for `issues` or `pulls`
issues:
  daysUntilLock: 30

.github/move.yml (vendored, new file, 13 lines)
@@ -0,0 +1,13 @@
# Configuration for move-issues - https://github.com/dessant/move-issues

# Delete the command comment. Ignored when the comment also contains other content
deleteCommand: true
# Close the source issue after moving
closeSourceIssue: true
# Lock the source issue after moving
lockSourceIssue: false
# Set custom aliases for targets
# aliases:
#   r: repo
#   or: owner/repo

.github/release-drafter.yml (vendored, new file, 4 lines)
@@ -0,0 +1,4 @@
template: |
  ## What's Changed

  $CHANGES

.github/stale.yml (vendored, new file, 17 lines)
@@ -0,0 +1,17 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
  - pinned
  - security
# Label to use when marking an issue as stale
staleLabel: wontfix
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

.gitignore (vendored, 5 lines added)
@@ -90,3 +90,8 @@ ENV/

# pylint
.pylint.d/

# VS Code
.vscode/*
!.vscode/cSpell.json
!.vscode/tasks.json

.gitmodules (vendored, new file, 4 lines)
@@ -0,0 +1,4 @@
[submodule "home-assistant-polymer"]
  path = home-assistant-polymer
  url = https://github.com/home-assistant/home-assistant-polymer
  branch = dev

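A plain `git clone` does not fetch the submodule declared above; it has to be initialized explicitly. A minimal sketch using standard git commands:

```sh
# Fetch the frontend submodule declared in .gitmodules into the working tree.
git submodule update --init home-assistant-polymer

# Or clone the repository and its submodules in one step.
git clone --recurse-submodules https://github.com/home-assistant/supervisor.git
```
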
.hadolint.yaml (new file, 5 lines)
@@ -0,0 +1,5 @@
ignored:
  - DL3018
  - DL3006
  - DL3013
  - SC2155

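This config suppresses the listed hadolint/ShellCheck rules when the Dockerfiles are linted. A minimal usage sketch, assuming hadolint is installed and invoked from the repository root so that it picks up `.hadolint.yaml` automatically:

```sh
# Lint the Supervisor Dockerfile; rules DL3018, DL3006, DL3013 and SC2155
# are ignored via the .hadolint.yaml in the current directory.
hadolint Dockerfile
```
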
.travis.yml (deleted, 12 lines)
@@ -1,12 +0,0 @@
sudo: false
matrix:
  fast_finish: true
  include:
    - python: "3.5"

cache:
  directories:
    - $HOME/.cache/pip
install: pip install -U tox
language: python
script: tox

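The deleted Travis CI configuration ran the checks through tox, and that entry point still works locally. A minimal sketch, assuming a working Python environment:

```sh
# Install tox and run the test suite the same way the removed Travis job did.
pip install -U tox
tox
```
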
.vscode/tasks.json (vendored, new file, 90 lines)
@@ -0,0 +1,90 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Run Testenv",
      "type": "shell",
      "command": "./scripts/test_env.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Run Testenv CLI",
      "type": "shell",
      "command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Update UI",
      "type": "shell",
      "command": "./scripts/update-frontend.sh",
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pytest",
      "type": "shell",
      "command": "pytest --timeout=10 tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Flake8",
      "type": "shell",
      "command": "flake8 hassio tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pylint",
      "type": "shell",
      "command": "pylint hassio",
      "dependsOn": ["Install all Requirements"],
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    }
  ]
}

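Outside VS Code, the test-related tasks above reduce to plain shell commands. A minimal sketch, assuming the packages from requirements_tests.txt are installed:

```sh
# Equivalents of the "Pytest", "Flake8" and "Pylint" tasks.
pytest --timeout=10 tests
flake8 hassio tests
pylint hassio
```
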
Dockerfile (new file, 34 lines)
@@ -0,0 +1,34 @@
ARG BUILD_FROM
FROM $BUILD_FROM

# Install base
RUN apk add --no-cache \
    eudev \
    eudev-libs \
    git \
    glib \
    libffi \
    libpulse \
    musl \
    openssl \
    socat

ARG BUILD_ARCH
WORKDIR /usr/src

# Install requirements
COPY requirements.txt .
RUN export MAKEFLAGS="-j$(nproc)" \
    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
    "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
    -r ./requirements.txt \
    && rm -f requirements.txt

# Install Home Assistant Supervisor
COPY . supervisor
RUN pip3 install --no-cache-dir -e ./supervisor \
    && python3 -m compileall ./supervisor/supervisor


WORKDIR /
COPY rootfs /

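The Dockerfile is parameterized by `BUILD_FROM` and `BUILD_ARCH`, which the Home Assistant build tooling normally supplies. A minimal local build sketch; the base image and architecture values here are illustrative assumptions, not taken from this diff:

```sh
# Build the Supervisor image locally, passing the build arguments the
# Dockerfile expects. Base image and arch are example values only.
docker build \
  --build-arg BUILD_FROM=homeassistant/amd64-base:latest \
  --build-arg BUILD_ARCH=amd64 \
  -t local/supervisor .
```
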
LICENSE (changed, 218 lines)
@@ -1,29 +1,201 @@
The BSD 3-Clause license text is removed and replaced with the Apache License, Version 2.0.

Removed:

BSD 3-Clause License

Copyright (c) 2017, Pascal Vizeli
All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:

* Redistributions of source code must retain the above copyright notice, this
  list of conditions and the following disclaimer.

* Redistributions in binary form must reproduce the above copyright notice,
  this list of conditions and the following disclaimer in the documentation
  and/or other materials provided with the distribution.

* Neither the name of the copyright holder nor the names of its
  contributors may be used to endorse or promote products derived from
  this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

Added:

                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright 2017 Pascal Vizeli
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
3
MANIFEST.in
Normal file
@@ -0,0 +1,3 @@
|
||||
include LICENSE.md
|
||||
graft supervisor
|
||||
recursive-exclude * *.py[co]
|
49
README.md
@@ -1,39 +1,28 @@
|
||||
# HassIO
|
||||
First private cloud solution for home automation.
|
||||
[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)
|
||||
|
||||
It is a Docker image (the Supervisor) that manages the Home Assistant Docker container and provides a UI to control it. It has its own ecosystem of add-ons to extend the functionality in an easy way.
|
||||
# Home Assistant Supervisor
|
||||
|
||||
[HassIO-Addons](https://github.com/pvizeli/hassio-addons)
|
||||
[HassIO-Build](https://github.com/pvizeli/hassio-build)
|
||||
## First private cloud solution for home automation
|
||||
|
||||
## History
|
||||
- **0.1**: Initial supervisor with setup HomeAssistant docker
|
||||
- **0.2**: Support for basic HostControll
|
||||
- **0.3**: Refactor code and add basic rest api
|
||||
- **0.4**: Move network api code / ssl folder
|
||||
Hass.io is a Docker-based system for managing your Home Assistant installation
|
||||
and related applications. The system is controlled via Home Assistant which
|
||||
communicates with the Supervisor. The Supervisor provides an API to manage the
|
||||
installation. This includes changing network settings or installing
|
||||
and updating software.
|
||||
|
||||
# Hardware Image
|
||||
The image is based on ResinOS and Yocto Linux. It comes with the HassIO supervisor pre-installed, including support for updating the supervisor over the air. After flashing, your host OS will not require any further maintenance! The image does not include Home Assistant; instead, it is downloaded when the image boots for the first time.
|
||||
## Installation
|
||||
|
||||
Download can be found here: https://drive.google.com/drive/folders/0B2o1Uz6l1wVNbFJnb2gwNXJja28?usp=sharing
|
||||
Installation instructions can be found at <https://home-assistant.io/hassio>.
|
||||
|
||||
After extracting the archive, flash it to a drive using [Etcher](https://etcher.io/).
|
||||
## Development
|
||||
|
||||
## History
|
||||
- **0.1**: First tech preview with dummy supervisor (ResinOS 2.0.0-RC5)
|
||||
- **0.2**: Fix some bugs and update it to HassIO 0.2
|
||||
- **0.3**: Update HostControll and features for HassIO 0.3 (ResinOS 2.0.0 / needs reflash)
|
||||
Developing the Supervisor is a bit tricky: not difficult, but tricky.
|
||||
|
||||
## Configuring the image
|
||||
You can configure the WiFi network that the image should connect to after flashing using [`resin-device-toolbox`](https://resinos.io/docs/raspberrypi3/gettingstarted/#install-resin-device-toolbox).
|
||||
- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-builder
|
||||
- Go into a HassOS device or VM and pull your supervisor.
|
||||
- Set developer mode with the CLI: `hassio supervisor options --channel=dev`
|
||||
- Tag it as `homeassistant/xy-hassio-supervisor:latest`
|
||||
- Restart the service with `systemctl restart hassos-supervisor` and follow its logs with `journalctl -fu hassos-supervisor`
|
||||
- Test your changes
|
||||
|
||||
## Developer access to ResinOS host
|
||||
Create an `authorized_keys` file in the boot partition of your SD card containing your public key. After a reboot, you can access your device as root over SSH on port 22222.
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
Read the log output from the supervisor:
|
||||
```bash
|
||||
journalctl -f -u resin-supervisor.service
|
||||
docker logs homeassistant
|
||||
```
|
||||
For small bug fixes or improvements, make a PR. For significant changes, open an RFC first.
|
||||
|
52
azure-pipelines-ci.yml
Normal file
@@ -0,0 +1,52 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- master
|
||||
- dev
|
||||
pr:
|
||||
- dev
|
||||
variables:
|
||||
- name: versionHadolint
|
||||
value: "v1.16.3"
|
||||
|
||||
jobs:
|
||||
- job: "Tox"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libpulse0 libudev1
|
||||
displayName: "Install Host library"
|
||||
- task: UsePythonVersion@0
|
||||
displayName: "Use Python 3.7"
|
||||
inputs:
|
||||
versionSpec: "3.7"
|
||||
- script: pip install tox
|
||||
displayName: "Install Tox"
|
||||
- script: tox
|
||||
displayName: "Run Tox"
|
||||
- job: "JQ"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: sudo apt-get install -y jq
|
||||
displayName: "Install JQ"
|
||||
- bash: |
|
||||
shopt -s globstar
|
||||
cat **/*.json | jq '.'
|
||||
displayName: "Run JQ"
|
||||
- job: "Hadolint"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: sudo docker pull hadolint/hadolint:$(versionHadolint)
|
||||
displayName: "Install Hadolint"
|
||||
- script: |
|
||||
sudo docker run --rm -i \
|
||||
-v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
|
||||
hadolint/hadolint:$(versionHadolint) < Dockerfile
|
||||
displayName: "Run Hadolint"
|
53
azure-pipelines-release.yml
Normal file
@@ -0,0 +1,53 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- dev
|
||||
tags:
|
||||
include:
|
||||
- "*"
|
||||
pr: none
|
||||
variables:
|
||||
- name: versionBuilder
|
||||
value: "7.0"
|
||||
- group: docker
|
||||
|
||||
jobs:
|
||||
- job: "VersionValidate"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
displayName: "Use Python 3.7"
|
||||
inputs:
|
||||
versionSpec: "3.7"
|
||||
- script: |
|
||||
setup_version="$(python setup.py -V)"
|
||||
branch_version="$(Build.SourceBranchName)"
|
||||
|
||||
if [ "${branch_version}" == "dev" ]; then
|
||||
exit 0
|
||||
elif [ "${setup_version}" != "${branch_version}" ]; then
|
||||
echo "Version of tag ${branch_version} don't match with ${setup_version}!"
|
||||
exit 1
|
||||
fi
|
||||
displayName: "Check version of branch/tag"
|
||||
- job: "Release"
|
||||
dependsOn:
|
||||
- "VersionValidate"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
|
||||
displayName: "Docker hub login"
|
||||
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
|
||||
displayName: "Install Builder"
|
||||
- script: |
|
||||
sudo docker run --rm --privileged \
|
||||
-v ~/.docker:/root/.docker \
|
||||
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
|
||||
homeassistant/amd64-builder:$(versionBuilder) \
|
||||
--generic $(Build.SourceBranchName) --all -t /data
|
||||
displayName: "Build Release"
|
26
azure-pipelines-wheels.yml
Normal file
@@ -0,0 +1,26 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- dev
|
||||
pr: none
|
||||
variables:
|
||||
- name: versionWheels
|
||||
value: '1.6.1-3.7-alpine3.11'
|
||||
resources:
|
||||
repositories:
|
||||
- repository: azure
|
||||
type: github
|
||||
name: 'home-assistant/ci-azure'
|
||||
endpoint: 'home-assistant'
|
||||
|
||||
|
||||
jobs:
|
||||
- template: templates/azp-job-wheels.yaml@azure
|
||||
parameters:
|
||||
builderVersion: '$(versionWheels)'
|
||||
builderApk: 'build-base;libffi-dev;openssl-dev'
|
||||
builderPip: 'Cython'
|
||||
wheelsRequirement: 'requirements.txt'
|
13
build.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"image": "homeassistant/{arch}-hassio-supervisor",
|
||||
"build_from": {
|
||||
"aarch64": "homeassistant/aarch64-base-python:3.7-alpine3.11",
|
||||
"armhf": "homeassistant/armhf-base-python:3.7-alpine3.11",
|
||||
"armv7": "homeassistant/armv7-base-python:3.7-alpine3.11",
|
||||
"amd64": "homeassistant/amd64-base-python:3.7-alpine3.11",
|
||||
"i386": "homeassistant/i386-base-python:3.7-alpine3.11"
|
||||
},
|
||||
"labels": {
|
||||
"io.hass.type": "supervisor"
|
||||
}
|
||||
}
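`build.json` maps each architecture to its Alpine/Python base image and templates the resulting supervisor image name. A small sketch of how those fields resolve for one architecture (illustrative only):

```python
# Sketch: resolve image name, base image, and label from build.json above.
import json

arch = "amd64"  # example architecture; the file also lists aarch64, armhf, armv7, i386

with open("build.json") as fp:
    build = json.load(fp)

image = build["image"].format(arch=arch)   # -> homeassistant/amd64-hassio-supervisor
base = build["build_from"][arch]           # -> homeassistant/amd64-base-python:3.7-alpine3.11
label = build["labels"]["io.hass.type"]    # -> supervisor

print(image, base, label)
```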
|
@@ -1 +0,0 @@
|
||||
"""Init file for HassIO."""
|
@@ -1,35 +0,0 @@
|
||||
"""Main file for HassIO."""
|
||||
import asyncio
|
||||
import logging
|
||||
import signal
|
||||
|
||||
import hassio.bootstrap as bootstrap
|
||||
import hassio.core as core
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
if __name__ == "__main__":
|
||||
bootstrap.initialize_logging()
|
||||
|
||||
if not bootstrap.check_environment():
|
||||
exit(1)
|
||||
|
||||
loop = asyncio.get_event_loop()
|
||||
hassio = core.HassIO(loop)
|
||||
|
||||
_LOGGER.info("Run Hassio setup")
|
||||
loop.run_until_complete(hassio.setup())
|
||||
|
||||
_LOGGER.info("Start Hassio task")
|
||||
loop.create_task(hassio.start())
|
||||
|
||||
try:
|
||||
loop.add_signal_handler(
|
||||
signal.SIGTERM, lambda: loop.create_task(hassio.stop()))
|
||||
except ValueError:
|
||||
_LOGGER.warning("Could not bind to SIGTERM")
|
||||
|
||||
loop.run_forever()
|
||||
_LOGGER.info("Close Hassio")
|
@@ -1,79 +0,0 @@
|
||||
"""Init file for HassIO rest api."""
|
||||
import logging
|
||||
|
||||
from aiohttp import web
|
||||
|
||||
from .host import APIHost
|
||||
from .network import APINetwork
|
||||
from .supervisor import APISupervisor
|
||||
from .homeassistant import APIHomeAssistant
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RestAPI(object):
|
||||
"""Handle rest api for hassio."""
|
||||
|
||||
def __init__(self, config, loop):
|
||||
"""Initialize docker base wrapper."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.webapp = web.Application(loop=self.loop)
|
||||
|
||||
# service stuff
|
||||
self._handler = None
|
||||
self.server = None
|
||||
|
||||
def register_host(self, host_controll):
|
||||
"""Register hostcontroll function."""
|
||||
api_host = APIHost(self.config, self.loop, host_controll)
|
||||
|
||||
self.webapp.router.add_get('/host/info', api_host.info)
|
||||
self.webapp.router.add_get('/host/reboot', api_host.reboot)
|
||||
self.webapp.router.add_get('/host/shutdown', api_host.shutdown)
|
||||
self.webapp.router.add_get('/host/update', api_host.update)
|
||||
|
||||
def register_network(self, host_controll):
|
||||
"""Register network function."""
|
||||
api_net = APINetwork(self.config, self.loop, host_controll)
|
||||
|
||||
self.webapp.router.add_get('/network/info', api_net.info)
|
||||
self.webapp.router.add_get('/network/options', api_net.options)
|
||||
|
||||
def register_supervisor(self, host_controll):
|
||||
"""Register supervisor function."""
|
||||
api_supervisor = APISupervisor(self.config, self.loop, host_controll)
|
||||
|
||||
self.webapp.router.add_get('/supervisor/info', api_supervisor.info)
|
||||
self.webapp.router.add_get('/supervisor/update', api_supervisor.update)
|
||||
self.webapp.router.add_get(
|
||||
'/supervisor/options', api_supervisor.options)
|
||||
|
||||
def register_homeassistant(self, dock_homeassistant):
|
||||
"""Register homeassistant function."""
|
||||
api_hass = APIHomeAssistant(self.config, self.loop, dock_homeassistant)
|
||||
|
||||
self.webapp.router.add_get('/homeassistant/info', api_hass.info)
|
||||
self.webapp.router.add_get('/homeassistant/update', api_hass.update)
|
||||
|
||||
async def start(self):
|
||||
"""Run rest api webserver."""
|
||||
self._handler = self.webapp.make_handler(loop=self.loop)
|
||||
|
||||
try:
|
||||
self.server = await self.loop.create_server(
|
||||
self._handler, "0.0.0.0", "80")
|
||||
except OSError as err:
|
||||
_LOGGER.fatal(
|
||||
"Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
|
||||
|
||||
async def stop(self):
|
||||
"""Stop rest api webserver."""
|
||||
if self.server:
|
||||
self.server.close()
|
||||
await self.server.wait_closed()
|
||||
await self.webapp.shutdown()
|
||||
|
||||
if self._handler:
|
||||
await self._handler.finish_connections(60)
|
||||
await self.webapp.cleanup()
|
@@ -1,42 +0,0 @@
|
||||
"""Init file for HassIO homeassistant rest api."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from .util import api_process, json_loads
|
||||
from ..const import ATTR_VERSION, ATTR_CURRENT
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class APIHomeAssistant(object):
|
||||
"""Handle rest api for homeassistant functions."""
|
||||
|
||||
def __init__(self, config, loop, dock_hass):
|
||||
"""Initialize homeassistant rest api part."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.dock_hass = dock_hass
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return host information."""
|
||||
info = {
|
||||
ATTR_VERSION: self.dock_hass.version,
|
||||
ATTR_CURRENT: self.config.current_homeassistant,
|
||||
}
|
||||
|
||||
return info
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
"""Update host OS."""
|
||||
body = await request.json(loads=json_loads)
|
||||
version = body.get(ATTR_VERSION, self.config.current_homeassistant)
|
||||
|
||||
if self.dock_hass.in_progress:
|
||||
raise RuntimeError("Other task is in progress.")
|
||||
|
||||
if version == self.dock_hass.version:
|
||||
raise RuntimeError("%s is already in use.", version)
|
||||
|
||||
return await asyncio.shield(self.dock_hass.update(version))
|
@@ -1,53 +0,0 @@
|
||||
"""Init file for HassIO host rest api."""
|
||||
import logging
|
||||
|
||||
from .util import api_process_hostcontroll, json_loads
|
||||
from ..const import ATTR_VERSION
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class APIHost(object):
|
||||
"""Handle rest api for host functions."""
|
||||
|
||||
def __init__(self, config, loop, host_controll):
|
||||
"""Initialize host rest api part."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.host_controll = host_controll
|
||||
|
||||
@api_process_hostcontroll
|
||||
def info(self, request):
|
||||
"""Return host information."""
|
||||
return self.host_controll.info()
|
||||
|
||||
@api_process_hostcontroll
|
||||
def reboot(self, request):
|
||||
"""Reboot host."""
|
||||
return self.host_controll.reboot()
|
||||
|
||||
@api_process_hostcontroll
|
||||
def shutdown(self, request):
|
||||
"""Poweroff host."""
|
||||
return self.host_controll.shutdown()
|
||||
|
||||
@api_process_hostcontroll
|
||||
def network_info(self, request):
|
||||
"""Edit network settings."""
|
||||
pass
|
||||
|
||||
@api_process_hostcontroll
|
||||
def network_update(self, request):
|
||||
"""Edit network settings."""
|
||||
pass
|
||||
|
||||
@api_process_hostcontroll
|
||||
async def update(self, request):
|
||||
"""Update host OS."""
|
||||
body = await request.json(loads=json_loads)
|
||||
version = body.get(ATTR_VERSION)
|
||||
|
||||
if version == self.host_controll.version:
|
||||
raise RuntimeError("%s is already in use.", version)
|
||||
|
||||
return await self.host_controll.host_update(version=version)
|
@@ -1,26 +0,0 @@
|
||||
"""Init file for HassIO network rest api."""
|
||||
import logging
|
||||
|
||||
from .util import api_process_hostcontroll
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class APINetwork(object):
|
||||
"""Handle rest api for network functions."""
|
||||
|
||||
def __init__(self, config, loop, host_controll):
|
||||
"""Initialize network rest api part."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.host_controll = host_controll
|
||||
|
||||
@api_process_hostcontroll
|
||||
def info(self, request):
|
||||
"""Show network settings."""
|
||||
pass
|
||||
|
||||
@api_process_hostcontroll
|
||||
def options(self, request):
|
||||
"""Edit network settings."""
|
||||
pass
|
@@ -1,49 +0,0 @@
|
||||
"""Init file for HassIO supervisor rest api."""
|
||||
import logging
|
||||
|
||||
from .util import api_process, api_process_hostcontroll, json_loads
|
||||
from ..const import ATTR_VERSION, ATTR_CURRENT, ATTR_BETA, HASSIO_VERSION
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class APISupervisor(object):
|
||||
"""Handle rest api for supervisor functions."""
|
||||
|
||||
def __init__(self, config, loop, host_controll):
|
||||
"""Initialize supervisor rest api part."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.host_controll = host_controll
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return host information."""
|
||||
info = {
|
||||
ATTR_VERSION: HASSIO_VERSION,
|
||||
ATTR_CURRENT: self.config.current_hassio,
|
||||
ATTR_BETA: self.config.upstream_beta,
|
||||
}
|
||||
|
||||
return info
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
"""Set supervisor options."""
|
||||
body = await request.json(loads=json_loads)
|
||||
|
||||
if ATTR_BETA in body:
|
||||
self.config.upstream_beta = body[ATTR_BETA]
|
||||
|
||||
return self.config.save()
|
||||
|
||||
@api_process_hostcontroll
|
||||
async def update(self, request):
|
||||
"""Update host OS."""
|
||||
body = await request.json(loads=json_loads)
|
||||
version = body.get(ATTR_VERSION, self.config.current_hassio)
|
||||
|
||||
if version == HASSIO_VERSION:
|
||||
raise RuntimeError("%s is already in use.", version)
|
||||
|
||||
return await self.host_controll.supervisor_update(version=version)
|
@@ -1,81 +0,0 @@
|
||||
"""Init file for HassIO util for rest api."""
|
||||
import json
|
||||
import logging
|
||||
|
||||
from aiohttp import web
|
||||
from aiohttp.web_exceptions import HTTPServiceUnavailable
|
||||
|
||||
from ..const import (
|
||||
JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def json_loads(data):
|
||||
"""Extract json from string with support for '' and None."""
|
||||
try:
|
||||
return json.loads(data)
|
||||
except json.JSONDecodeError:
|
||||
return {}
|
||||
|
||||
|
||||
def api_process(method):
|
||||
"""Wrap function with true/false calls to rest api."""
|
||||
async def wrap_api(api, *args, **kwargs):
|
||||
"""Return api information."""
|
||||
try:
|
||||
answer = await method(api, *args, **kwargs)
|
||||
except RuntimeError as err:
|
||||
return api_return_error(message=str(err))
|
||||
|
||||
if isinstance(answer, dict):
|
||||
return api_return_ok(data=answer)
|
||||
elif answer:
|
||||
return api_return_ok()
|
||||
return api_return_error()
|
||||
|
||||
return wrap_api
|
||||
|
||||
|
||||
def api_process_hostcontroll(method):
|
||||
"""Wrap HostControll calls to rest api."""
|
||||
async def wrap_hostcontroll(api, *args, **kwargs):
|
||||
"""Return host information."""
|
||||
if not api.host_controll.active:
|
||||
raise HTTPServiceUnavailable()
|
||||
|
||||
try:
|
||||
answer = await method(api, *args, **kwargs)
|
||||
except RuntimeError as err:
|
||||
return api_return_error(message=str(err))
|
||||
|
||||
if isinstance(answer, dict):
|
||||
return api_return_ok(data=answer)
|
||||
elif answer is None:
|
||||
return api_not_supported()
|
||||
elif answer:
|
||||
return api_return_ok()
|
||||
return api_return_error()
|
||||
|
||||
return wrap_hostcontroll
|
||||
|
||||
|
||||
def api_return_error(message=None):
|
||||
"""Return a API error message."""
|
||||
return web.json_response({
|
||||
JSON_RESULT: RESULT_ERROR,
|
||||
JSON_MESSAGE: message,
|
||||
})
|
||||
|
||||
|
||||
def api_return_ok(data=None):
|
||||
"""Return a API ok answer."""
|
||||
return web.json_response({
|
||||
JSON_RESULT: RESULT_OK,
|
||||
JSON_DATA: data,
|
||||
})
|
||||
|
||||
|
||||
def api_not_supported():
|
||||
"""Return a api error with not supported."""
|
||||
return api_return_error("Function is not supported")
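For context, this is roughly how the `api_process` decorator from the removed util module above was consumed by the API views; the view class and payload here are hypothetical:

```python
# Hypothetical view using the removed api_process decorator.
from .util import api_process  # as the removed API modules imported it


class APIExample(object):
    """Hypothetical API view."""

    @api_process
    async def info(self, request):
        # A dict return becomes {"result": "ok", "data": {...}};
        # raising RuntimeError("msg") becomes an error response with "msg".
        return {"version": "0.4"}
```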
|
@@ -1,72 +0,0 @@
|
||||
"""Bootstrap HassIO."""
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
|
||||
from colorlog import ColoredFormatter
|
||||
|
||||
from .const import SOCKET_DOCKER
|
||||
from .config import CoreConfig
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def initialize_system_data(websession):
|
||||
"""Setup default config and create folders."""
|
||||
config = CoreConfig(websession)
|
||||
|
||||
# homeassistant config folder
|
||||
if not os.path.isdir(config.path_config):
|
||||
_LOGGER.info(
|
||||
"Create Home-Assistant config folder %s", config.path_config)
|
||||
os.mkdir(config.path_config)
|
||||
|
||||
# homeassistant ssl folder
|
||||
if not os.path.isdir(config.path_ssl):
|
||||
_LOGGER.info("Create Home-Assistant ssl folder %s", config.path_ssl)
|
||||
os.mkdir(config.path_ssl)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
def initialize_logging():
|
||||
"""Setup the logging."""
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
|
||||
"[%(name)s] %(message)s")
|
||||
colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
|
||||
datefmt = '%y-%m-%d %H:%M:%S'
|
||||
|
||||
# suppress overly verbose logs from libraries that aren't helpful
|
||||
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
|
||||
|
||||
logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
|
||||
colorfmt,
|
||||
datefmt=datefmt,
|
||||
reset=True,
|
||||
log_colors={
|
||||
'DEBUG': 'cyan',
|
||||
'INFO': 'green',
|
||||
'WARNING': 'yellow',
|
||||
'ERROR': 'red',
|
||||
'CRITICAL': 'red',
|
||||
}
|
||||
))
|
||||
|
||||
|
||||
def check_environment():
|
||||
"""Check if all environment are exists."""
|
||||
for key in ('SUPERVISOR_SHARE', 'SUPERVISOR_NAME',
|
||||
'HOMEASSISTANT_REPOSITORY'):
|
||||
try:
|
||||
os.environ[key]
|
||||
except KeyError:
|
||||
_LOGGER.fatal("Can't find %s in env!", key)
|
||||
return False
|
||||
|
||||
mode = os.stat(SOCKET_DOCKER)[stat.ST_MODE]
|
||||
if not stat.S_ISSOCK(mode):
|
||||
_LOGGER.fatal("Can't find docker socket!")
|
||||
return False
|
||||
|
||||
return True
|
114
hassio/config.py
@@ -1,114 +0,0 @@
|
||||
"""Bootstrap HassIO."""
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
from .const import FILE_HASSIO_CONFIG, HASSIO_SHARE
|
||||
from .tools import fetch_current_versions
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
HOMEASSISTANT_CONFIG = "{}/homeassistant_config"
|
||||
HOMEASSISTANT_IMAGE = 'homeassistant_image'
|
||||
HOMEASSISTANT_CURRENT = 'homeassistant_current'
|
||||
|
||||
HASSIO_SSL = "{}/ssl"
|
||||
HASSIO_CURRENT = 'hassio_current'
|
||||
UPSTREAM_BETA = 'upstream_beta'
|
||||
|
||||
|
||||
class CoreConfig(object):
|
||||
"""Hold all config data."""
|
||||
|
||||
def __init__(self, websession, config_file=FILE_HASSIO_CONFIG):
|
||||
"""Initialize config object."""
|
||||
self.websession = websession
|
||||
self._filename = config_file
|
||||
self._data = {}
|
||||
|
||||
# init or load data
|
||||
if os.path.isfile(self._filename):
|
||||
try:
|
||||
with open(self._filename, 'r') as cfile:
|
||||
self._data = json.loads(cfile.read())
|
||||
except OSError:
|
||||
_LOGGER.warning("Can't read %s", self._filename)
|
||||
|
||||
# init data
|
||||
if not self._data:
|
||||
self._data.update({
|
||||
HOMEASSISTANT_IMAGE: os.environ['HOMEASSISTANT_REPOSITORY'],
|
||||
UPSTREAM_BETA: False,
|
||||
})
|
||||
self.save()
|
||||
|
||||
def save(self):
|
||||
"""Store data to config file."""
|
||||
try:
|
||||
with open(self._filename, 'w') as conf_file:
|
||||
conf_file.write(json.dumps(self._data))
|
||||
except OSError:
|
||||
_LOGGER.exception("Can't store config in %s", self._filename)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
async def fetch_update_infos(self):
|
||||
"""Read current versions from web."""
|
||||
current = await fetch_current_versions(
|
||||
self.websession, beta=self.upstream_beta)
|
||||
|
||||
if current:
|
||||
self._data.update({
|
||||
HOMEASSISTANT_CURRENT: current.get('homeassistant_tag'),
|
||||
HASSIO_CURRENT: current.get('hassio_tag'),
|
||||
})
|
||||
self.save()
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@property
|
||||
def upstream_beta(self):
|
||||
"""Return True if we run in beta upstream."""
|
||||
return self._data.get(UPSTREAM_BETA, False)
|
||||
|
||||
@upstream_beta.setter
|
||||
def upstream_beta(self, value):
|
||||
"""Set beta upstream mode."""
|
||||
self._data[UPSTREAM_BETA] = bool(value)
|
||||
|
||||
@property
|
||||
def homeassistant_image(self):
|
||||
"""Return docker homeassistant repository."""
|
||||
return self._data.get(HOMEASSISTANT_IMAGE)
|
||||
|
||||
@property
|
||||
def current_homeassistant(self):
|
||||
"""Actual version of homeassistant."""
|
||||
return self._data.get(HOMEASSISTANT_CURRENT)
|
||||
|
||||
@property
|
||||
def current_hassio(self):
|
||||
"""Actual version of hassio."""
|
||||
return self._data.get(HASSIO_CURRENT)
|
||||
|
||||
@property
|
||||
def path_config_docker(self):
|
||||
"""Return config path extern for docker."""
|
||||
return HOMEASSISTANT_CONFIG.format(os.environ['SUPERVISOR_SHARE'])
|
||||
|
||||
@property
|
||||
def path_config(self):
|
||||
"""Return config path inside supervisor."""
|
||||
return HOMEASSISTANT_CONFIG.format(HASSIO_SHARE)
|
||||
|
||||
@property
|
||||
def path_ssl_docker(self):
|
||||
"""Return SSL path extern for docker."""
|
||||
return HASSIO_SSL.format(os.environ['SUPERVISOR_SHARE'])
|
||||
|
||||
@property
|
||||
def path_ssl(self):
|
||||
"""Return SSL path inside supervisor."""
|
||||
return HASSIO_SSL.format(HASSIO_SHARE)
|
@@ -1,30 +0,0 @@
|
||||
"""Const file for HassIO."""
|
||||
HASSIO_VERSION = '0.4'
|
||||
|
||||
URL_HASSIO_VERSION = \
|
||||
'https://raw.githubusercontent.com/pvizeli/hassio/master/version.json'
|
||||
URL_HASSIO_VERSION_BETA = \
|
||||
'https://raw.githubusercontent.com/pvizeli/hassio/master/version_beta.json'
|
||||
|
||||
URL_ADDONS_REPO = 'https://github.com/pvizeli/hassio-addons'
|
||||
|
||||
HASSIO_SHARE = "/data"
|
||||
|
||||
RUN_UPDATE_INFO_TASKS = 28800
|
||||
|
||||
FILE_HASSIO_ADDONS = "{}/addons.json".format(HASSIO_SHARE)
|
||||
FILE_HASSIO_CONFIG = "{}/config.json".format(HASSIO_SHARE)
|
||||
|
||||
SOCKET_DOCKER = "/var/run/docker.sock"
|
||||
SOCKET_HC = "/var/run/hassio-hc.sock"
|
||||
|
||||
JSON_RESULT = 'result'
|
||||
JSON_DATA = 'data'
|
||||
JSON_MESSAGE = 'message'
|
||||
|
||||
RESULT_ERROR = 'error'
|
||||
RESULT_OK = 'ok'
|
||||
|
||||
ATTR_VERSION = 'version'
|
||||
ATTR_CURRENT = 'current'
|
||||
ATTR_BETA = 'beta'
|
101
hassio/core.py
@@ -1,101 +0,0 @@
|
||||
"""Main file for HassIO."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import aiohttp
|
||||
import docker
|
||||
|
||||
from . import bootstrap
|
||||
from .api import RestAPI
|
||||
from .host_controll import HostControll
|
||||
from .const import SOCKET_DOCKER, RUN_UPDATE_INFO_TASKS
|
||||
from .scheduler import Scheduler
|
||||
from .dock.homeassistant import DockerHomeAssistant
|
||||
from .dock.supervisor import DockerSupervisor
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class HassIO(object):
|
||||
"""Main object of hassio."""
|
||||
|
||||
def __init__(self, loop):
|
||||
"""Initialize hassio object."""
|
||||
self.loop = loop
|
||||
self.websession = aiohttp.ClientSession(loop=self.loop)
|
||||
self.config = bootstrap.initialize_system_data(self.websession)
|
||||
self.scheduler = Scheduler(self.loop)
|
||||
self.api = RestAPI(self.config, self.loop)
|
||||
self.dock = docker.DockerClient(
|
||||
base_url="unix:/{}".format(SOCKET_DOCKER), version='auto')
|
||||
|
||||
# init basic docker container
|
||||
self.supervisor = DockerSupervisor(
|
||||
self.config, self.loop, self.dock)
|
||||
self.homeassistant = DockerHomeAssistant(
|
||||
self.config, self.loop, self.dock)
|
||||
|
||||
# init HostControll
|
||||
self.host_controll = HostControll(self.loop)
|
||||
|
||||
async def setup(self):
|
||||
"""Setup HassIO orchestration."""
|
||||
# supervisor
|
||||
await self.supervisor.attach()
|
||||
|
||||
# hostcontroll
|
||||
host_info = await self.host_controll.info()
|
||||
if host_info:
|
||||
self.host_controll.version = host_info.get('version')
|
||||
_LOGGER.info(
|
||||
"Connected to HostControll. OS: %s Version: %s Hostname: %s "
|
||||
"Feature-lvl: %d", host_info.get('os'),
|
||||
host_info.get('version'), host_info.get('hostname'),
|
||||
host_info.get('level', 0))
|
||||
|
||||
# rest api views
|
||||
self.api.register_host(self.host_controll)
|
||||
self.api.register_network(self.host_controll)
|
||||
self.api.register_supervisor(self.host_controll)
|
||||
self.api.register_homeassistant(self.homeassistant)
|
||||
|
||||
# schedule update info tasks
|
||||
self.scheduler.register_task(
|
||||
self.config.fetch_update_infos, RUN_UPDATE_INFO_TASKS,
|
||||
first_run=True)
|
||||
|
||||
# first start of supervisor?
|
||||
if not await self.homeassistant.exists():
|
||||
_LOGGER.info("No HomeAssistant docker found.")
|
||||
await self._setup_homeassistant()
|
||||
|
||||
async def start(self):
|
||||
"""Start HassIO orchestration."""
|
||||
# start api
|
||||
await self.api.start()
|
||||
|
||||
# run HomeAssistant
|
||||
await self.homeassistant.run()
|
||||
|
||||
async def stop(self):
|
||||
"""Stop a running orchestration."""
|
||||
tasks = [self.websession.close(), self.api.stop()]
|
||||
await asyncio.wait(tasks, loop=self.loop)
|
||||
|
||||
self.loop.stop()
|
||||
|
||||
async def _setup_homeassistant(self):
|
||||
"""Install a homeassistant docker container."""
|
||||
while True:
|
||||
# read homeassistant tag and install it
|
||||
if not self.config.current_homeassistant:
|
||||
await self.config.fetch_update_infos()
|
||||
|
||||
tag = self.config.current_homeassistant
|
||||
if tag and await self.homeassistant.install(tag):
|
||||
break
|
||||
_LOGGER.warning("Error on setup HomeAssistant. Retry in 60.")
|
||||
await asyncio.sleep(60, loop=self.loop)
|
||||
|
||||
# store version
|
||||
_LOGGER.info("HomeAssistant docker now installed.")
|
@@ -1,217 +0,0 @@
|
||||
"""Init file for HassIO docker object."""
|
||||
import asyncio
|
||||
from contextlib import suppress
|
||||
import logging
|
||||
|
||||
import docker
|
||||
|
||||
from ..tools import get_version_from_env
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class DockerBase(object):
|
||||
"""Docker hassio wrapper."""
|
||||
|
||||
def __init__(self, config, loop, dock, image=None):
|
||||
"""Initialize docker base wrapper."""
|
||||
self.config = config
|
||||
self.loop = loop
|
||||
self.dock = dock
|
||||
self.image = image
|
||||
self.container = None
|
||||
self.version = None
|
||||
self._lock = asyncio.Lock(loop=loop)
|
||||
|
||||
@property
|
||||
def docker_name(self):
|
||||
"""Return name of docker container."""
|
||||
return None
|
||||
|
||||
@property
|
||||
def in_progress(self):
|
||||
"""Return True if a task is in progress."""
|
||||
return self._lock.locked()
|
||||
|
||||
async def install(self, tag):
|
||||
"""Pull docker image."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute install while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._install, tag)
|
||||
|
||||
def _install(self, tag):
|
||||
"""Pull docker image.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
try:
|
||||
_LOGGER.info("Pull image %s tag %s.", self.image, tag)
|
||||
image = self.dock.images.pull("{}:{}".format(self.image, tag))
|
||||
|
||||
image.tag(self.image, tag='latest')
|
||||
self.version = get_version_from_env(image.attrs['Config']['Env'])
|
||||
_LOGGER.info("Tag image %s with version %s as latest.",
|
||||
self.image, self.version)
|
||||
except docker.errors.APIError as err:
|
||||
_LOGGER.error("Can't install %s:%s -> %s.", self.image, tag, err)
|
||||
return False
|
||||
return True
|
||||
|
||||
def exists(self):
|
||||
"""Return True if docker image exists in local repo.
|
||||
|
||||
Return a Future.
|
||||
"""
|
||||
return self.loop.run_in_executor(None, self._exists)
|
||||
|
||||
def _exists(self):
|
||||
"""Return True if docker image exists in local repo.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
try:
|
||||
image = self.dock.images.get(self.image)
|
||||
self.version = get_version_from_env(image.attrs['Config']['Env'])
|
||||
except docker.errors.DockerException:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def is_running(self):
|
||||
"""Return True if docker is Running.
|
||||
|
||||
Return a Future.
|
||||
"""
|
||||
return self.loop.run_in_executor(None, self._is_running)
|
||||
|
||||
def _is_running(self):
|
||||
"""Return True if docker is Running.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
if not self.container:
|
||||
try:
|
||||
self.container = self.dock.containers.get(self.docker_name)
|
||||
self.version = get_version_from_env(
|
||||
self.container.attrs['Config']['Env'])
|
||||
except docker.errors.DockerException:
|
||||
return False
|
||||
|
||||
self.container.reload()
|
||||
return self.container.status == 'running'
|
||||
|
||||
async def attach(self):
|
||||
"""Attach to running docker container."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute attach while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._attach)
|
||||
|
||||
def _attach(self):
|
||||
"""Attach to running docker container.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
try:
|
||||
self.container = self.dock.containers.get(self.docker_name)
|
||||
self.image = self.container.attrs['Config']['Image']
|
||||
self.version = get_version_from_env(
|
||||
self.container.attrs['Config']['Env'])
|
||||
_LOGGER.info("Attach to image %s with version %s.",
|
||||
self.image, self.version)
|
||||
except (docker.errors.DockerException, KeyError):
|
||||
_LOGGER.fatal(
|
||||
"Can't attach to %s docker container!", self.docker_name)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
async def run(self):
|
||||
"""Run docker image."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute run while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
_LOGGER.info("Run docker image %s with version %s.",
|
||||
self.image, self.version)
|
||||
return await self.loop.run_in_executor(None, self._run)
|
||||
|
||||
def _run(self):
|
||||
"""Run docker image.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
async def stop(self):
|
||||
"""Stop/remove docker container."""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute stop while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
await self.loop.run_in_executor(None, self._stop)
|
||||
return True
|
||||
|
||||
def _stop(self):
|
||||
"""Stop/remove and remove docker container.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
if not self.container:
|
||||
return
|
||||
|
||||
self.container.reload()
|
||||
if self.container.status == 'running':
|
||||
with suppress(docker.errors.DockerException):
|
||||
self.container.stop()
|
||||
|
||||
with suppress(docker.errors.DockerException):
|
||||
self.container.remove(force=True)
|
||||
|
||||
self.container = None
|
||||
|
||||
async def update(self, tag):
|
||||
"""Update a docker image.
|
||||
|
||||
Return a Future.
|
||||
"""
|
||||
if self._lock.locked():
|
||||
_LOGGER.error("Can't excute update while a task is in progress")
|
||||
return False
|
||||
|
||||
async with self._lock:
|
||||
return await self.loop.run_in_executor(None, self._update, tag)
|
||||
|
||||
def _update(self, tag):
|
||||
"""Update a docker image.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
old_image = "{}:{}".format(self.image, self.version)
|
||||
old_run = self._is_running()
|
||||
|
||||
_LOGGER.info("Update docker %s with %s:%s.",
|
||||
old_image, self.image, tag)
|
||||
|
||||
# update docker image
|
||||
if self._install(tag):
|
||||
_LOGGER.info("Cleanup old %s docker.", old_image)
|
||||
self._stop()
|
||||
try:
|
||||
self.dock.images.remove(image=old_image, force=True)
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.warning(
|
||||
"Can't remove old image %s -> %s.", old_image, err)
|
||||
# restore
|
||||
if old_run:
|
||||
self._run()
|
||||
return True
|
||||
|
||||
return False
|
@@ -1,66 +0,0 @@
|
||||
"""Init file for HassIO docker object."""
|
||||
import logging
|
||||
|
||||
import docker
|
||||
|
||||
from . import DockerBase
|
||||
from ..tools import get_version_from_env, get_local_ip
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
HASS_DOCKER_NAME = 'homeassistant'
|
||||
|
||||
|
||||
class DockerHomeAssistant(DockerBase):
|
||||
"""Docker hassio wrapper for HomeAssistant."""
|
||||
|
||||
def __init__(self, config, loop, dock):
|
||||
"""Initialize docker homeassistant wrapper."""
|
||||
super().__init__(config, loop, dock, image=config.homeassistant_image)
|
||||
|
||||
@property
|
||||
def docker_name(self):
|
||||
"""Return name of docker container."""
|
||||
return HASS_DOCKER_NAME
|
||||
|
||||
def _run(self):
|
||||
"""Run docker image.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
if self._is_running():
|
||||
return
|
||||
|
||||
api_endpoint = get_local_ip(self.loop)
|
||||
|
||||
# cleanup old container
|
||||
self._stop()
|
||||
|
||||
try:
|
||||
self.container = self.dock.containers.run(
|
||||
self.image,
|
||||
name=self.docker_name,
|
||||
detach=True,
|
||||
privileged=True,
|
||||
network_mode='host',
|
||||
restart_policy={
|
||||
"Name": "always",
|
||||
"MaximumRetryCount": 10,
|
||||
},
|
||||
environment={
|
||||
'HASSIO': api_endpoint,
|
||||
},
|
||||
volumes={
|
||||
self.config.path_config_docker:
|
||||
{'bind': '/config', 'mode': 'rw'},
|
||||
self.config.path_ssl_docker:
|
||||
{'bind': '/ssl', 'mode': 'rw'},
|
||||
})
|
||||
|
||||
self.version = get_version_from_env(
|
||||
self.container.attrs['Config']['Env'])
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.error("Can't run %s -> %s.", self.image, err)
|
||||
return False
|
||||
|
||||
return True
|
@@ -1,29 +0,0 @@
|
||||
"""Init file for HassIO docker object."""
|
||||
import os
|
||||
|
||||
from . import DockerBase
|
||||
|
||||
|
||||
class DockerSupervisor(DockerBase):
|
||||
"""Docker hassio wrapper for HomeAssistant."""
|
||||
|
||||
@property
|
||||
def docker_name(self):
|
||||
"""Return name of docker container."""
|
||||
return os.environ['SUPERVISOR_NAME']
|
||||
|
||||
async def run(self):
|
||||
"""Run docker image."""
|
||||
raise RuntimeError("Not support on supervisor docker container!")
|
||||
|
||||
async def install(self, tag):
|
||||
"""Pull docker image."""
|
||||
raise RuntimeError("Not support on supervisor docker container!")
|
||||
|
||||
async def stop(self):
|
||||
"""Stop/remove docker container."""
|
||||
raise RuntimeError("Not support on supervisor docker container!")
|
||||
|
||||
async def update(self, tag):
|
||||
"""Update docker image."""
|
||||
raise RuntimeError("Not support on supervisor docker container!")
|
@@ -1,112 +0,0 @@
|
||||
"""Host controll for HassIO."""
|
||||
import asyncio
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import stat
|
||||
|
||||
import async_timeout
|
||||
|
||||
from .const import SOCKET_HC
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
TIMEOUT = 15
|
||||
|
||||
LEVEL_POWER = 1
|
||||
LEVEL_UPDATE_SUPERVISOR = 2
|
||||
LEVEL_UPDATE_HOST = 4
|
||||
LEVEL_NETWORK = 8
|
||||
|
||||
|
||||
class HostControll(object):
|
||||
"""Client for host controll."""
|
||||
|
||||
def __init__(self, loop):
|
||||
"""Initialize HostControll socket client."""
|
||||
self.loop = loop
|
||||
self.active = False
|
||||
self.version = None
|
||||
|
||||
mode = os.stat(SOCKET_HC)[stat.ST_MODE]
|
||||
if stat.S_ISSOCK(mode):
|
||||
self.active = True
|
||||
|
||||
async def _send_command(self, command):
|
||||
"""Send command to host.
|
||||
|
||||
Is a coroutine.
|
||||
"""
|
||||
if not self.active:
|
||||
return
|
||||
|
||||
reader, writer = await asyncio.open_unix_connection(
|
||||
SOCKET_HC, loop=self.loop)
|
||||
|
||||
try:
|
||||
# send
|
||||
_LOGGER.info("Send '%s' to HostControll.", command)
|
||||
|
||||
with async_timeout.timeout(TIMEOUT, loop=self.loop):
|
||||
writer.write("{}\n".format(command).encode())
|
||||
data = await reader.readline()
|
||||
|
||||
response = data.decode()
|
||||
_LOGGER.debug("Receive from HostControll: %s.", response)
|
||||
|
||||
if response == "OK":
|
||||
return True
|
||||
elif response == "ERROR":
|
||||
return False
|
||||
elif response == "WRONG":
|
||||
return None
|
||||
else:
|
||||
try:
|
||||
return json.loads(response)
|
||||
except json.JSONDecodeError:
|
||||
_LOGGER.warning("Json parse error from HostControll.")
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
_LOGGER.error("Timeout from HostControll!")
|
||||
|
||||
finally:
|
||||
writer.close()
|
||||
|
||||
def info(self):
|
||||
"""Return Info from host.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self._send_command("info")
|
||||
|
||||
def reboot(self):
|
||||
"""Reboot the host system.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self._send_command("reboot")
|
||||
|
||||
def shutdown(self):
|
||||
"""Shutdown the host system.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self._send_command("shutdown")
|
||||
|
||||
def host_update(self, version=None):
|
||||
"""Update the host system.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
if version:
|
||||
return self._send_command("host-update {}".format(version))
|
||||
return self._send_command("host-update")
|
||||
|
||||
def supervisor_update(self, version=None):
|
||||
"""Update the supervisor on host system.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
if version:
|
||||
return self._send_command("supervisor-update {}".format(version))
|
||||
return self._send_command("supervisor-update")
|
@@ -1,54 +0,0 @@
|
||||
"""Schedule for HassIO."""
|
||||
import logging
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SEC = 'seconds'
|
||||
REPEAT = 'repeat'
|
||||
CALL = 'callback'
|
||||
TASK = 'task'
|
||||
|
||||
|
||||
class Scheduler(object):
|
||||
"""Schedule task inside HassIO."""
|
||||
|
||||
def __init__(self, loop):
|
||||
"""Initialize task schedule."""
|
||||
self.loop = loop
|
||||
self._data = {}
|
||||
|
||||
def register_task(self, coro_callback, seconds, repeat=True,
|
||||
first_run=False):
|
||||
"""Schedule a coroutine.
|
||||
|
||||
The coroutine needs to be a callback without arguments.
|
||||
"""
|
||||
idx = hash(coro_callback)
|
||||
|
||||
# generate data
|
||||
opts = {
|
||||
CALL: coro_callback,
|
||||
SEC: seconds,
|
||||
REPEAT: repeat,
|
||||
}
|
||||
self._data[idx] = opts
|
||||
|
||||
# schedule task
|
||||
if first_run:
|
||||
self._run_task(idx)
|
||||
else:
|
||||
task = self.loop.call_later(seconds, self._run_task, idx)
|
||||
self._data[idx][TASK] = task
|
||||
|
||||
return idx
|
||||
|
||||
def _run_task(self, idx):
|
||||
"""Run a scheduled task."""
|
||||
data = self._data.pop(idx)
|
||||
|
||||
self.loop.create_task(data[CALL]())
|
||||
|
||||
if data[REPEAT]:
|
||||
task = self.loop.call_later(data[SEC], self._run_task, idx)
|
||||
data[TASK] = task
|
||||
self._data[idx] = data
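A short usage sketch of the removed `Scheduler` above, registering a periodic coroutine the way `core.py` registers `fetch_update_infos`; the callback here is illustrative:

```python
# Sketch: schedule a periodic coroutine with the Scheduler above.
import asyncio

from hassio.scheduler import Scheduler  # removed module, shown above


async def refresh():
    """Illustrative periodic job."""
    print("refresh version info")


loop = asyncio.get_event_loop()
scheduler = Scheduler(loop)

# Run refresh() once immediately, then every 28800 seconds (8 hours).
scheduler.register_task(refresh, 28800, repeat=True, first_run=True)
# loop.run_forever()
```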
|
@@ -1,58 +0,0 @@
"""Tools file for HassIO."""
import asyncio
import logging
import re
import socket

import aiohttp
import async_timeout

from .const import URL_HASSIO_VERSION, URL_HASSIO_VERSION_BETA

_LOGGER = logging.getLogger(__name__)

_RE_VERSION = re.compile(r"VERSION=(.*)")


async def fetch_current_versions(websession, beta=False):
    """Fetch current versions from GitHub.

    Is a coroutine.
    """
    url = URL_HASSIO_VERSION_BETA if beta else URL_HASSIO_VERSION
    try:
        with async_timeout.timeout(10, loop=websession.loop):
            async with websession.get(url) as request:
                return await request.json(content_type=None)

    except (ValueError, aiohttp.ClientError, asyncio.TimeoutError) as err:
        _LOGGER.warning("Can't fetch versions from %s! %s", url, err)


def get_version_from_env(env_list):
    """Extract version from an ENV list."""
    for env in env_list:
        found = _RE_VERSION.match(env)
        if found:
            return found.group(1)

    _LOGGER.error("Can't find VERSION in env")
    return None


def get_local_ip(loop):
    """Retrieve local IP address.

    Needs to run inside an executor.
    """
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

        # Use Google Public DNS server to determine own IP
        sock.connect(('8.8.8.8', 80))

        return sock.getsockname()[0]
    except socket.error:
        return socket.gethostbyname(socket.gethostname())
    finally:
        sock.close()
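For reference, a hedged usage sketch of the helpers removed above (the session handling and ENV list are illustrative, and both functions are assumed to be imported from this module):

import asyncio
import aiohttp

async def example():
    async with aiohttp.ClientSession() as session:
        versions = await fetch_current_versions(session)   # None on network errors
        print(versions)

    # Parses entries like "VERSION=1.2.3" out of a container ENV list.
    assert get_version_from_env(["PATH=/usr/bin", "VERSION=1.2.3"]) == "1.2.3"

asyncio.run(example())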
1
home-assistant-polymer
Submodule
Submodule home-assistant-polymer added at 554c0b692d
27
pylintrc
@@ -15,24 +15,37 @@ reports=no
|
||||
# abstract-method - with intro of async there are always methods missing
|
||||
|
||||
disable=
|
||||
locally-disabled,
|
||||
duplicate-code,
|
||||
cyclic-import,
|
||||
abstract-class-little-used,
|
||||
abstract-class-not-used,
|
||||
unused-argument,
|
||||
abstract-method,
|
||||
cyclic-import,
|
||||
duplicate-code,
|
||||
global-statement,
|
||||
locally-disabled,
|
||||
not-context-manager,
|
||||
redefined-variable-type,
|
||||
too-few-public-methods,
|
||||
too-many-arguments,
|
||||
too-many-branches,
|
||||
too-many-instance-attributes,
|
||||
too-many-lines,
|
||||
too-many-locals,
|
||||
too-many-public-methods,
|
||||
too-many-return-statements,
|
||||
too-many-statements,
|
||||
too-many-lines,
|
||||
unused-argument,
|
||||
line-too-long,
|
||||
bad-continuation,
|
||||
too-few-public-methods,
|
||||
abstract-method
|
||||
no-self-use,
|
||||
not-async-context-manager,
|
||||
too-many-locals,
|
||||
too-many-branches,
|
||||
no-else-return
|
||||
|
||||
[EXCEPTIONS]
|
||||
overgeneral-exceptions=Exception,HomeAssistantError
|
||||
overgeneral-exceptions=Exception
|
||||
|
||||
|
||||
[TYPECHECK]
|
||||
ignored-modules = distutils
|
||||
|
18
requirements.txt
Normal file
@@ -0,0 +1,18 @@
aiohttp==3.6.1
async_timeout==3.0.1
attrs==19.3.0
cchardet==2.1.6
colorlog==4.1.0
cpe==1.2.1
cryptography==2.8
docker==4.2.0
gitpython==3.1.0
jinja2==2.11.1
packaging==20.3
ptvsd==4.3.2
pulsectl==20.2.4
pytz==2019.3
pyudev==0.22.0
ruamel.yaml==0.15.100
uvloop==0.14.0
voluptuous==0.11.7
6
requirements_tests.txt
Normal file
@@ -0,0 +1,6 @@
flake8==3.7.9
pylint==2.4.4
pytest==5.4.1
pytest-timeout==1.3.4
pytest-aiohttp==0.3.0
black==19.10b0
9
rootfs/etc/cont-init.d/udev.sh
Normal file
@@ -0,0 +1,9 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Start udev service
# ==============================================================================
udevd --daemon

bashio::log.info "Update udev information"
udevadm trigger
udevadm settle
35
rootfs/etc/pulse/client.conf
Normal file
@@ -0,0 +1,35 @@
|
||||
# This file is part of PulseAudio.
|
||||
#
|
||||
# PulseAudio is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Lesser General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# PulseAudio is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU Lesser General Public License
|
||||
# along with PulseAudio; if not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
## Configuration file for PulseAudio clients. See pulse-client.conf(5) for
|
||||
## more information. Default values are commented out. Use either ; or # for
|
||||
## commenting.
|
||||
|
||||
; default-sink =
|
||||
; default-source =
|
||||
default-server = unix://data/audio/external/pulse.sock
|
||||
; default-dbus-server =
|
||||
|
||||
autospawn = no
|
||||
; daemon-binary = /usr/bin/pulseaudio
|
||||
; extra-arguments = --log-target=syslog
|
||||
|
||||
; cookie-file =
|
||||
|
||||
; enable-shm = yes
|
||||
; shm-size-bytes = 0 # setting this 0 will use the system-default, usually 64 MiB
|
||||
|
||||
; auto-connect-localhost = no
|
||||
; auto-connect-display = no
|
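Clients inside a container pick this file up automatically because PulseAudio reads /etc/pulse/client.conf, so connections go to the shared pulse.sock configured as default-server above. A minimal, illustrative sketch with the pulsectl package pinned in requirements.txt (the client name is arbitrary):

import pulsectl

# Connects via the default-server configured above (the shared pulse.sock).
with pulsectl.Pulse("supervisor-example") as pulse:
    for sink in pulse.sink_list():
        print(sink.name, sink.volume.value_flat)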
5
rootfs/etc/services.d/supervisor/finish
Normal file
@@ -0,0 +1,5 @@
#!/usr/bin/execlineb -S0
# ==============================================================================
# Take down the S6 supervision tree when Supervisor fails
# ==============================================================================
s6-svscanctl -t /var/run/s6/services
7
rootfs/etc/services.d/supervisor/run
Normal file
@@ -0,0 +1,7 @@
#!/usr/bin/with-contenv bashio
# ==============================================================================
# Start Supervisor service
# ==============================================================================
export LD_PRELOAD="/usr/local/lib/libjemalloc.so.2"

exec python3 -m supervisor
133
scripts/test_env.sh
Executable file
@@ -0,0 +1,133 @@
|
||||
#!/bin/bash
|
||||
set -eE
|
||||
|
||||
DOCKER_TIMEOUT=30
|
||||
DOCKER_PID=0
|
||||
|
||||
|
||||
function start_docker() {
|
||||
local starttime
|
||||
local endtime
|
||||
|
||||
echo "Starting docker."
|
||||
dockerd 2> /dev/null &
|
||||
DOCKER_PID=$!
|
||||
|
||||
echo "Waiting for docker to initialize..."
|
||||
starttime="$(date +%s)"
|
||||
endtime="$(date +%s)"
|
||||
until docker info >/dev/null 2>&1; do
|
||||
if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
|
||||
sleep 1
|
||||
endtime=$(date +%s)
|
||||
else
|
||||
echo "Timeout while waiting for docker to come up"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
echo "Docker was initialized"
|
||||
}
|
||||
|
||||
|
||||
function stop_docker() {
|
||||
local starttime
|
||||
local endtime
|
||||
|
||||
echo "Stopping in container docker..."
|
||||
if [ "$DOCKER_PID" -gt 0 ] && kill -0 "$DOCKER_PID" 2> /dev/null; then
|
||||
starttime="$(date +%s)"
|
||||
endtime="$(date +%s)"
|
||||
|
||||
# Now wait for it to die
|
||||
kill "$DOCKER_PID"
|
||||
while kill -0 "$DOCKER_PID" 2> /dev/null; do
|
||||
if [ $((endtime - starttime)) -le $DOCKER_TIMEOUT ]; then
|
||||
sleep 1
|
||||
endtime=$(date +%s)
|
||||
else
|
||||
echo "Timeout while waiting for container docker to die"
|
||||
exit 1
|
||||
fi
|
||||
done
|
||||
else
|
||||
echo "Your host might have been left with unreleased resources"
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
function build_supervisor() {
|
||||
docker pull homeassistant/amd64-builder:dev
|
||||
|
||||
docker run --rm --privileged \
|
||||
-v /run/docker.sock:/run/docker.sock -v "$(pwd):/data" \
|
||||
homeassistant/amd64-builder:dev \
|
||||
--generic dev -t /data --test --amd64 --no-cache
|
||||
}
|
||||
|
||||
|
||||
function cleanup_lastboot() {
|
||||
if [[ -f /workspaces/test_supervisor/config.json ]]; then
|
||||
echo "Cleaning up last boot"
|
||||
cp /workspaces/test_supervisor/config.json /tmp/config.json
|
||||
jq -rM 'del(.last_boot)' /tmp/config.json > /workspaces/test_supervisor/config.json
|
||||
rm /tmp/config.json
|
||||
fi
|
||||
}
|
||||
|
||||
|
||||
function cleanup_docker() {
|
||||
echo "Cleaning up stopped containers..."
|
||||
docker rm $(docker ps -a -q) || true
|
||||
}
|
||||
|
||||
|
||||
function setup_test_env() {
|
||||
mkdir -p /workspaces/test_supervisor
|
||||
|
||||
echo "Start Supervisor"
|
||||
docker run --rm --privileged \
|
||||
--name hassio_supervisor \
|
||||
--security-opt seccomp=unconfined \
|
||||
--security-opt apparmor:unconfined \
|
||||
-v /run/docker.sock:/run/docker.sock \
|
||||
-v /run/dbus:/run/dbus \
|
||||
-v "/workspaces/test_supervisor":/data \
|
||||
-v /etc/machine-id:/etc/machine-id:ro \
|
||||
-e SUPERVISOR_SHARE="/workspaces/test_supervisor" \
|
||||
-e SUPERVISOR_NAME=hassio_supervisor \
|
||||
-e SUPERVISOR_DEV=1 \
|
||||
-e HOMEASSISTANT_REPOSITORY="homeassistant/qemux86-64-homeassistant" \
|
||||
homeassistant/amd64-hassio-supervisor:latest
|
||||
|
||||
}
|
||||
|
||||
|
||||
function init_dbus() {
|
||||
if pgrep dbus-daemon; then
|
||||
echo "Dbus is running"
|
||||
return 0
|
||||
fi
|
||||
|
||||
echo "Startup dbus"
|
||||
mkdir -p /var/lib/dbus
|
||||
cp -f /etc/machine-id /var/lib/dbus/machine-id
|
||||
|
||||
# cleanups
|
||||
mkdir -p /run/dbus
|
||||
rm -f /run/dbus/pid
|
||||
|
||||
# run
|
||||
dbus-daemon --system --print-address
|
||||
}
|
||||
|
||||
echo "Start Test-Env"
|
||||
|
||||
start_docker
|
||||
trap "stop_docker" ERR
|
||||
|
||||
build_supervisor
|
||||
cleanup_lastboot
|
||||
cleanup_docker
|
||||
init_dbus
|
||||
setup_test_env
|
||||
stop_docker
|
18
scripts/update-frontend.sh
Executable file
@@ -0,0 +1,18 @@
#!/bin/bash
set -e

# Update frontend
git submodule update --init --recursive --remote

[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
cd home-assistant-polymer
nvm install
script/bootstrap

# build frontend
cd hassio
./script/build_hassio

# Copy frontend
rm -f ../../supervisor/hassio/api/panel/chunk.*
cp -rf build/* ../../supervisor/api/panel/
17
setup.cfg
Normal file
@@ -0,0 +1,17 @@
[isort]
multi_line_output = 3
include_trailing_comma=True
force_grid_wrap=0
line_length=88
indent = " "
not_skip = __init__.py
force_sort_within_sections = true
sections = FUTURE,STDLIB,INBETWEENS,THIRDPARTY,FIRSTPARTY,LOCALFOLDER
default_section = THIRDPARTY
forced_separate = tests
combine_as_imports = true
use_parentheses = true

[flake8]
max-line-length = 88
ignore = E501, W503
65
setup.py
@@ -1,40 +1,43 @@
|
||||
"""Home Assistant Supervisor setup."""
|
||||
from setuptools import setup
|
||||
|
||||
from hassio.const import HASSIO_VERSION
|
||||
|
||||
from supervisor.const import SUPERVISOR_VERSION
|
||||
|
||||
setup(
|
||||
name='HassIO',
|
||||
version=HASSIO_VERSION,
|
||||
license='BSD License',
|
||||
author='The Home Assistant Authors',
|
||||
author_email='hello@home-assistant.io',
|
||||
url='https://home-assistant.io/',
|
||||
description=('Open-source private cloud os for Home-Assistant'
|
||||
' based on ResinOS'),
|
||||
long_description=('A maintenainless private cloud operator system that'
|
||||
'setup a Home-Assistant instance. Based on ResinOS'),
|
||||
name="Supervisor",
|
||||
version=SUPERVISOR_VERSION,
|
||||
license="BSD License",
|
||||
author="The Home Assistant Authors",
|
||||
author_email="hello@home-assistant.io",
|
||||
url="https://home-assistant.io/",
|
||||
description=("Open-source private cloud os for Home-Assistant" " based on HassOS"),
|
||||
long_description=(
|
||||
"A maintainless private cloud operator system that"
|
||||
"setup a Home-Assistant instance. Based on HassOS"
|
||||
),
|
||||
classifiers=[
|
||||
'Intended Audience :: End Users/Desktop',
|
||||
'Intended Audience :: Developers',
|
||||
'License :: OSI Approved :: Apache Software License',
|
||||
'Operating System :: OS Independent',
|
||||
'Topic :: Home Automation'
|
||||
'Topic :: Software Development :: Libraries :: Python Modules',
|
||||
'Topic :: Scientific/Engineering :: Atmospheric Science',
|
||||
'Development Status :: 5 - Production/Stable',
|
||||
'Intended Audience :: Developers',
|
||||
'Programming Language :: Python :: 3.5',
|
||||
"Intended Audience :: End Users/Desktop",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: Apache Software License",
|
||||
"Operating System :: OS Independent",
|
||||
"Topic :: Home Automation",
|
||||
"Topic :: Software Development :: Libraries :: Python Modules",
|
||||
"Topic :: Scientific/Engineering :: Atmospheric Science",
|
||||
"Development Status :: 5 - Production/Stable",
|
||||
"Intended Audience :: Developers",
|
||||
"Programming Language :: Python :: 3.7",
|
||||
],
|
||||
keywords=['docker', 'home-assistant', 'api'],
|
||||
keywords=["docker", "home-assistant", "api"],
|
||||
zip_safe=False,
|
||||
platforms='any',
|
||||
packages=['hassio', 'hassio.dock', 'hassio.api'],
|
||||
platforms="any",
|
||||
packages=[
|
||||
"supervisor",
|
||||
"supervisor.docker",
|
||||
"supervisor.addons",
|
||||
"supervisor.api",
|
||||
"supervisor.misc",
|
||||
"supervisor.utils",
|
||||
"supervisor.snapshots",
|
||||
],
|
||||
include_package_data=True,
|
||||
install_requires=[
|
||||
'async_timeout',
|
||||
'aiohttp',
|
||||
'docker',
|
||||
'colorlog',
|
||||
]
|
||||
)
|
||||
|
1
supervisor/__init__.py
Normal file
@@ -0,0 +1 @@
"""Init file for Supervisor."""
62
supervisor/__main__.py
Normal file
@@ -0,0 +1,62 @@
|
||||
"""Main file for Supervisor."""
|
||||
import asyncio
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from supervisor import bootstrap
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def initialize_event_loop():
|
||||
"""Attempt to use uvloop."""
|
||||
try:
|
||||
# pylint: disable=import-outside-toplevel
|
||||
import uvloop
|
||||
|
||||
uvloop.install()
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
return asyncio.get_event_loop()
|
||||
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
if __name__ == "__main__":
|
||||
bootstrap.initialize_logging()
|
||||
|
||||
# Init async event loop
|
||||
loop = initialize_event_loop()
|
||||
|
||||
# Check that all information needed to set up Supervisor is available
|
||||
bootstrap.check_environment()
|
||||
|
||||
# init executor pool
|
||||
executor = ThreadPoolExecutor(thread_name_prefix="SyncWorker")
|
||||
loop.set_default_executor(executor)
|
||||
|
||||
_LOGGER.info("Initialize Supervisor setup")
|
||||
coresys = loop.run_until_complete(bootstrap.initialize_coresys())
|
||||
loop.run_until_complete(coresys.core.connect())
|
||||
|
||||
bootstrap.supervisor_debugger(coresys)
|
||||
bootstrap.migrate_system_env(coresys)
|
||||
|
||||
_LOGGER.info("Setup Supervisor")
|
||||
loop.run_until_complete(coresys.core.setup())
|
||||
|
||||
loop.call_soon_threadsafe(loop.create_task, coresys.core.start())
|
||||
loop.call_soon_threadsafe(bootstrap.reg_signal, loop)
|
||||
|
||||
try:
|
||||
_LOGGER.info("Run Supervisor")
|
||||
loop.run_forever()
|
||||
finally:
|
||||
_LOGGER.info("Stopping Supervisor")
|
||||
loop.run_until_complete(coresys.core.stop())
|
||||
executor.shutdown(wait=False)
|
||||
loop.close()
|
||||
|
||||
_LOGGER.info("Close Supervisor")
|
||||
sys.exit(0)
|
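The executor wiring above is what later lets blocking work be pushed off the event loop onto the "SyncWorker" pool. A self-contained sketch of the same pattern (not Supervisor code):

import asyncio
from concurrent.futures import ThreadPoolExecutor
import time

async def main():
    loop = asyncio.get_running_loop()
    loop.set_default_executor(ThreadPoolExecutor(thread_name_prefix="SyncWorker"))
    # Passing None selects the default executor, so time.sleep runs on a SyncWorker thread.
    await loop.run_in_executor(None, time.sleep, 0.1)

asyncio.run(main())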
334
supervisor/addons/__init__.py
Normal file
@@ -0,0 +1,334 @@
|
||||
"""Init file for Supervisor add-ons."""
|
||||
import asyncio
|
||||
from contextlib import suppress
|
||||
import logging
|
||||
import tarfile
|
||||
from typing import Dict, List, Optional, Union
|
||||
|
||||
from ..const import BOOT_AUTO, STATE_STARTED
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..exceptions import (
|
||||
AddonsError,
|
||||
AddonsNotSupportedError,
|
||||
CoreDNSError,
|
||||
DockerAPIError,
|
||||
HomeAssistantAPIError,
|
||||
HostAppArmorError,
|
||||
)
|
||||
from ..store.addon import AddonStore
|
||||
from .addon import Addon
|
||||
from .data import AddonsData
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
AnyAddon = Union[Addon, AddonStore]
|
||||
|
||||
|
||||
class AddonManager(CoreSysAttributes):
|
||||
"""Manage add-ons inside Supervisor."""
|
||||
|
||||
def __init__(self, coresys: CoreSys):
|
||||
"""Initialize Docker base wrapper."""
|
||||
self.coresys: CoreSys = coresys
|
||||
self.data: AddonsData = AddonsData(coresys)
|
||||
self.local: Dict[str, Addon] = {}
|
||||
self.store: Dict[str, AddonStore] = {}
|
||||
|
||||
@property
|
||||
def all(self) -> List[AnyAddon]:
|
||||
"""Return a list of all add-ons."""
|
||||
addons = {**self.store, **self.local}
|
||||
return list(addons.values())
|
||||
|
||||
@property
|
||||
def installed(self) -> List[Addon]:
|
||||
"""Return a list of all installed add-ons."""
|
||||
return list(self.local.values())
|
||||
|
||||
def get(self, addon_slug: str) -> Optional[AnyAddon]:
|
||||
"""Return an add-on from slug.
|
||||
|
||||
Prio:
|
||||
1 - Local
|
||||
2 - Store
|
||||
"""
|
||||
if addon_slug in self.local:
|
||||
return self.local[addon_slug]
|
||||
return self.store.get(addon_slug)
|
||||
|
||||
def from_token(self, token: str) -> Optional[Addon]:
|
||||
"""Return an add-on from Supervisor token."""
|
||||
for addon in self.installed:
|
||||
if token == addon.supervisor_token:
|
||||
return addon
|
||||
return None
|
||||
|
||||
async def load(self) -> None:
|
||||
"""Start up add-on management."""
|
||||
tasks = []
|
||||
for slug in self.data.system:
|
||||
addon = self.local[slug] = Addon(self.coresys, slug)
|
||||
tasks.append(addon.load())
|
||||
|
||||
# Run initial tasks
|
||||
_LOGGER.info("Found %d installed add-ons", len(tasks))
|
||||
if tasks:
|
||||
await asyncio.wait(tasks)
|
||||
|
||||
# Sync DNS
|
||||
await self.sync_dns()
|
||||
|
||||
async def boot(self, stage: str) -> None:
|
||||
"""Boot add-ons with mode auto."""
|
||||
tasks = []
|
||||
for addon in self.installed:
|
||||
if addon.boot != BOOT_AUTO or addon.startup != stage:
|
||||
continue
|
||||
tasks.append(addon.start())
|
||||
|
||||
_LOGGER.info("Phase '%s' start %d add-ons", stage, len(tasks))
|
||||
if tasks:
|
||||
await asyncio.wait(tasks)
|
||||
await asyncio.sleep(self.sys_config.wait_boot)
|
||||
|
||||
async def shutdown(self, stage: str) -> None:
|
||||
"""Shutdown addons."""
|
||||
tasks = []
|
||||
for addon in self.installed:
|
||||
if await addon.state() != STATE_STARTED or addon.startup != stage:
|
||||
continue
|
||||
tasks.append(addon.stop())
|
||||
|
||||
_LOGGER.info("Phase '%s' stop %d add-ons", stage, len(tasks))
|
||||
if tasks:
|
||||
await asyncio.wait(tasks)
|
||||
|
||||
async def install(self, slug: str) -> None:
|
||||
"""Install an add-on."""
|
||||
if slug in self.local:
|
||||
_LOGGER.warning("Add-on %s is already installed", slug)
|
||||
return
|
||||
store = self.store.get(slug)
|
||||
|
||||
if not store:
|
||||
_LOGGER.error("Add-on %s not exists", slug)
|
||||
raise AddonsError()
|
||||
|
||||
if not store.available:
|
||||
_LOGGER.error("Add-on %s not supported on that platform", slug)
|
||||
raise AddonsNotSupportedError()
|
||||
|
||||
self.data.install(store)
|
||||
addon = Addon(self.coresys, slug)
|
||||
|
||||
if not addon.path_data.is_dir():
|
||||
_LOGGER.info("Create Home Assistant add-on data folder %s", addon.path_data)
|
||||
addon.path_data.mkdir()
|
||||
|
||||
# Setup/Fix AppArmor profile
|
||||
await addon.install_apparmor()
|
||||
|
||||
try:
|
||||
await addon.instance.install(store.version, store.image)
|
||||
except DockerAPIError:
|
||||
self.data.uninstall(addon)
|
||||
raise AddonsError() from None
|
||||
else:
|
||||
self.local[slug] = addon
|
||||
_LOGGER.info("Add-on '%s' successfully installed", slug)
|
||||
|
||||
async def uninstall(self, slug: str) -> None:
|
||||
"""Remove an add-on."""
|
||||
if slug not in self.local:
|
||||
_LOGGER.warning("Add-on %s is not installed", slug)
|
||||
return
|
||||
addon = self.local.get(slug)
|
||||
|
||||
try:
|
||||
await addon.instance.remove()
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
await addon.remove_data()
|
||||
|
||||
# Cleanup audio settings
|
||||
if addon.path_pulse.exists():
|
||||
with suppress(OSError):
|
||||
addon.path_pulse.unlink()
|
||||
|
||||
# Cleanup AppArmor profile
|
||||
with suppress(HostAppArmorError):
|
||||
await addon.uninstall_apparmor()
|
||||
|
||||
# Cleanup Ingress panel from sidebar
|
||||
if addon.ingress_panel:
|
||||
addon.ingress_panel = False
|
||||
with suppress(HomeAssistantAPIError):
|
||||
await self.sys_ingress.update_hass_panel(addon)
|
||||
|
||||
# Cleanup discovery data
|
||||
for message in self.sys_discovery.list_messages:
|
||||
if message.addon != addon.slug:
|
||||
continue
|
||||
self.sys_discovery.remove(message)
|
||||
|
||||
# Cleanup services data
|
||||
for service in self.sys_services.list_services:
|
||||
if addon.slug not in service.active:
|
||||
continue
|
||||
service.del_service_data(addon)
|
||||
|
||||
self.data.uninstall(addon)
|
||||
self.local.pop(slug)
|
||||
|
||||
_LOGGER.info("Add-on '%s' successfully removed", slug)
|
||||
|
||||
async def update(self, slug: str) -> None:
|
||||
"""Update add-on."""
|
||||
if slug not in self.local:
|
||||
_LOGGER.error("Add-on %s is not installed", slug)
|
||||
raise AddonsError()
|
||||
addon = self.local.get(slug)
|
||||
|
||||
if addon.is_detached:
|
||||
_LOGGER.error("Add-on %s is not available inside store", slug)
|
||||
raise AddonsError()
|
||||
store = self.store.get(slug)
|
||||
|
||||
if addon.version == store.version:
|
||||
_LOGGER.warning("No update available for add-on %s", slug)
|
||||
return
|
||||
|
||||
# Check if available, Maybe something have changed
|
||||
if not store.available:
|
||||
_LOGGER.error("Add-on %s not supported on that platform", slug)
|
||||
raise AddonsNotSupportedError()
|
||||
|
||||
# Update instance
|
||||
last_state = await addon.state()
|
||||
try:
|
||||
await addon.instance.update(store.version, store.image)
|
||||
|
||||
# Cleanup
|
||||
with suppress(DockerAPIError):
|
||||
await addon.instance.cleanup()
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
else:
|
||||
self.data.update(store)
|
||||
_LOGGER.info("Add-on '%s' successfully updated", slug)
|
||||
|
||||
# Setup/Fix AppArmor profile
|
||||
await addon.install_apparmor()
|
||||
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
await addon.start()
|
||||
|
||||
async def rebuild(self, slug: str) -> None:
|
||||
"""Perform a rebuild of local build add-on."""
|
||||
if slug not in self.local:
|
||||
_LOGGER.error("Add-on %s is not installed", slug)
|
||||
raise AddonsError()
|
||||
addon = self.local.get(slug)
|
||||
|
||||
if addon.is_detached:
|
||||
_LOGGER.error("Add-on %s is not available inside store", slug)
|
||||
raise AddonsError()
|
||||
store = self.store.get(slug)
|
||||
|
||||
# Check if a rebuild is possible now
|
||||
if addon.version != store.version:
|
||||
_LOGGER.error("Version changed, use Update instead Rebuild")
|
||||
raise AddonsError()
|
||||
if not addon.need_build:
|
||||
_LOGGER.error("Can't rebuild a image based add-on")
|
||||
raise AddonsNotSupportedError()
|
||||
|
||||
# remove docker container but not addon config
|
||||
last_state = await addon.state()
|
||||
try:
|
||||
await addon.instance.remove()
|
||||
await addon.instance.install(addon.version)
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
else:
|
||||
self.data.update(store)
|
||||
_LOGGER.info("Add-on '%s' successfully rebuilt", slug)
|
||||
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
await addon.start()
|
||||
|
||||
async def restore(self, slug: str, tar_file: tarfile.TarFile) -> None:
|
||||
"""Restore state of an add-on."""
|
||||
if slug not in self.local:
|
||||
_LOGGER.debug("Add-on %s is not local available for restore", slug)
|
||||
addon = Addon(self.coresys, slug)
|
||||
else:
|
||||
_LOGGER.debug("Add-on %s is local available for restore", slug)
|
||||
addon = self.local[slug]
|
||||
|
||||
await addon.restore(tar_file)
|
||||
|
||||
# Check if new
|
||||
if slug not in self.local:
|
||||
_LOGGER.info("Detect new Add-on after restore %s", slug)
|
||||
self.local[slug] = addon
|
||||
|
||||
# Update ingress
|
||||
if addon.with_ingress:
|
||||
with suppress(HomeAssistantAPIError):
|
||||
await self.sys_ingress.update_hass_panel(addon)
|
||||
|
||||
async def repair(self) -> None:
|
||||
"""Repair local add-ons."""
|
||||
needs_repair: List[Addon] = []
|
||||
|
||||
# Evaluate Add-ons to repair
|
||||
for addon in self.installed:
|
||||
if await addon.instance.exists():
|
||||
continue
|
||||
needs_repair.append(addon)
|
||||
|
||||
_LOGGER.info("Found %d add-ons to repair", len(needs_repair))
|
||||
if not needs_repair:
|
||||
return
|
||||
|
||||
for addon in needs_repair:
|
||||
_LOGGER.info("Start repair for add-on: %s", addon.slug)
|
||||
await self.sys_run_in_executor(
|
||||
self.sys_docker.network.stale_cleanup, addon.instance.name
|
||||
)
|
||||
|
||||
with suppress(DockerAPIError, KeyError):
|
||||
# Need to pull the image again
|
||||
if not addon.need_build:
|
||||
await addon.instance.install(addon.version, addon.image)
|
||||
continue
|
||||
|
||||
# Need local lookup
|
||||
if addon.need_build and not addon.is_detached:
|
||||
store = self.store[addon.slug]
|
||||
# If this add-on is available for rebuild
|
||||
if addon.version == store.version:
|
||||
await addon.instance.install(addon.version, addon.image)
|
||||
continue
|
||||
|
||||
_LOGGER.error("Can't repair %s", addon.slug)
|
||||
with suppress(AddonsError):
|
||||
await self.uninstall(addon.slug)
|
||||
|
||||
async def sync_dns(self) -> None:
|
||||
"""Sync add-ons DNS names."""
|
||||
# Update hosts
|
||||
for addon in self.installed:
|
||||
if not await addon.instance.is_running():
|
||||
continue
|
||||
self.sys_dns.add_host(
|
||||
ipv4=addon.ip_address, names=[addon.hostname], write=False
|
||||
)
|
||||
|
||||
# Write hosts files
|
||||
with suppress(CoreDNSError):
|
||||
self.sys_dns.write_hosts()
|
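A hedged sketch of how the manager above is used: get() prefers the installed copy over the store entry, and install() only works for slugs that exist in a store. The slug "core_ssh" and the coresys object are illustrative:

async def ensure_installed(coresys, slug="core_ssh"):
    manager = AddonManager(coresys)
    await manager.load()                 # attach all installed add-ons

    addon = manager.get(slug)            # Addon if installed, AddonStore otherwise
    if addon is None:
        raise RuntimeError(f"{slug} is not in any add-on store")
    if slug not in manager.local:
        await manager.install(slug)      # raises AddonsError on failure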
684
supervisor/addons/addon.py
Normal file
@@ -0,0 +1,684 @@
|
||||
"""Init file for Supervisor add-ons."""
|
||||
from contextlib import suppress
|
||||
from copy import deepcopy
|
||||
from ipaddress import IPv4Address
|
||||
import logging
|
||||
from pathlib import Path, PurePath
|
||||
import re
|
||||
import secrets
|
||||
import shutil
|
||||
import tarfile
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Any, Awaitable, Dict, List, Optional
|
||||
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from ..const import (
|
||||
ATTR_ACCESS_TOKEN,
|
||||
ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT,
|
||||
ATTR_AUTO_UPDATE,
|
||||
ATTR_BOOT,
|
||||
ATTR_IMAGE,
|
||||
ATTR_INGRESS_ENTRY,
|
||||
ATTR_INGRESS_PANEL,
|
||||
ATTR_INGRESS_PORT,
|
||||
ATTR_INGRESS_TOKEN,
|
||||
ATTR_NETWORK,
|
||||
ATTR_OPTIONS,
|
||||
ATTR_PORTS,
|
||||
ATTR_PROTECTED,
|
||||
ATTR_SCHEMA,
|
||||
ATTR_STATE,
|
||||
ATTR_SYSTEM,
|
||||
ATTR_USER,
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
DNS_SUFFIX,
|
||||
STATE_STARTED,
|
||||
STATE_STOPPED,
|
||||
)
|
||||
from ..coresys import CoreSys
|
||||
from ..docker.addon import DockerAddon
|
||||
from ..docker.stats import DockerStats
|
||||
from ..exceptions import (
|
||||
AddonsError,
|
||||
AddonsNotSupportedError,
|
||||
DockerAPIError,
|
||||
HostAppArmorError,
|
||||
JsonFileError,
|
||||
)
|
||||
from ..utils.apparmor import adjust_profile
|
||||
from ..utils.json import read_json_file, write_json_file
|
||||
from ..utils.tar import exclude_filter, secure_path
|
||||
from .model import AddonModel, Data
|
||||
from .utils import remove_data
|
||||
from .validate import SCHEMA_ADDON_SNAPSHOT, validate_options
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
RE_WEBUI = re.compile(
|
||||
r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
|
||||
r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$"
|
||||
)
|
||||
|
||||
RE_OLD_AUDIO = re.compile(r"\d+,\d+")
|
||||
|
||||
|
||||
class Addon(AddonModel):
|
||||
"""Hold data for add-on inside Supervisor."""
|
||||
|
||||
def __init__(self, coresys: CoreSys, slug: str):
|
||||
"""Initialize data holder."""
|
||||
self.coresys: CoreSys = coresys
|
||||
self.instance: DockerAddon = DockerAddon(coresys, self)
|
||||
self.slug: str = slug
|
||||
|
||||
async def load(self) -> None:
|
||||
"""Async initialize of object."""
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.attach(tag=self.version)
|
||||
|
||||
@property
|
||||
def ip_address(self) -> IPv4Address:
|
||||
"""Return IP of Add-on instance."""
|
||||
return self.instance.ip_address
|
||||
|
||||
@property
|
||||
def data(self) -> Data:
|
||||
"""Return add-on data/config."""
|
||||
return self.sys_addons.data.system[self.slug]
|
||||
|
||||
@property
|
||||
def data_store(self) -> Data:
|
||||
"""Return add-on data from store."""
|
||||
return self.sys_store.data.addons.get(self.slug, self.data)
|
||||
|
||||
@property
|
||||
def persist(self) -> Data:
|
||||
"""Return add-on data/config."""
|
||||
return self.sys_addons.data.user[self.slug]
|
||||
|
||||
@property
|
||||
def is_installed(self) -> bool:
|
||||
"""Return True if an add-on is installed."""
|
||||
return True
|
||||
|
||||
@property
|
||||
def is_detached(self) -> bool:
|
||||
"""Return True if add-on is detached."""
|
||||
return self.slug not in self.sys_store.data.addons
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if this add-on is available on this platform."""
|
||||
return self._available(self.data_store)
|
||||
|
||||
@property
|
||||
def version(self) -> Optional[str]:
|
||||
"""Return installed version."""
|
||||
return self.persist[ATTR_VERSION]
|
||||
|
||||
@property
|
||||
def dns(self) -> List[str]:
|
||||
"""Return list of DNS name for that add-on."""
|
||||
return [f"{self.hostname}.{DNS_SUFFIX}"]
|
||||
|
||||
@property
|
||||
def options(self) -> Dict[str, Any]:
|
||||
"""Return options with local changes."""
|
||||
return {**self.data[ATTR_OPTIONS], **self.persist[ATTR_OPTIONS]}
|
||||
|
||||
@options.setter
|
||||
def options(self, value: Optional[Dict[str, Any]]):
|
||||
"""Store user add-on options."""
|
||||
if value is None:
|
||||
self.persist[ATTR_OPTIONS] = {}
|
||||
else:
|
||||
self.persist[ATTR_OPTIONS] = deepcopy(value)
|
||||
|
||||
@property
|
||||
def boot(self) -> bool:
|
||||
"""Return boot config with prio local settings."""
|
||||
return self.persist.get(ATTR_BOOT, super().boot)
|
||||
|
||||
@boot.setter
|
||||
def boot(self, value: bool):
|
||||
"""Store user boot options."""
|
||||
self.persist[ATTR_BOOT] = value
|
||||
|
||||
@property
|
||||
def auto_update(self) -> bool:
|
||||
"""Return if auto update is enable."""
|
||||
return self.persist.get(ATTR_AUTO_UPDATE, super().auto_update)
|
||||
|
||||
@auto_update.setter
|
||||
def auto_update(self, value: bool):
|
||||
"""Set auto update."""
|
||||
self.persist[ATTR_AUTO_UPDATE] = value
|
||||
|
||||
@property
|
||||
def uuid(self) -> str:
|
||||
"""Return an API token for this add-on."""
|
||||
return self.persist[ATTR_UUID]
|
||||
|
||||
@property
|
||||
def supervisor_token(self) -> Optional[str]:
|
||||
"""Return access token for Supervisor API."""
|
||||
return self.persist.get(ATTR_ACCESS_TOKEN)
|
||||
|
||||
@property
|
||||
def ingress_token(self) -> Optional[str]:
|
||||
"""Return access token for Supervisor API."""
|
||||
return self.persist.get(ATTR_INGRESS_TOKEN)
|
||||
|
||||
@property
|
||||
def ingress_entry(self) -> Optional[str]:
|
||||
"""Return ingress external URL."""
|
||||
if self.with_ingress:
|
||||
return f"/api/hassio_ingress/{self.ingress_token}"
|
||||
return None
|
||||
|
||||
@property
|
||||
def latest_version(self) -> str:
|
||||
"""Return version of add-on."""
|
||||
return self.data_store[ATTR_VERSION]
|
||||
|
||||
@property
|
||||
def protected(self) -> bool:
|
||||
"""Return if add-on is in protected mode."""
|
||||
return self.persist[ATTR_PROTECTED]
|
||||
|
||||
@protected.setter
|
||||
def protected(self, value: bool):
|
||||
"""Set add-on in protected mode."""
|
||||
self.persist[ATTR_PROTECTED] = value
|
||||
|
||||
@property
|
||||
def ports(self) -> Optional[Dict[str, Optional[int]]]:
|
||||
"""Return ports of add-on."""
|
||||
return self.persist.get(ATTR_NETWORK, super().ports)
|
||||
|
||||
@ports.setter
|
||||
def ports(self, value: Optional[Dict[str, Optional[int]]]):
|
||||
"""Set custom ports of add-on."""
|
||||
if value is None:
|
||||
self.persist.pop(ATTR_NETWORK, None)
|
||||
return
|
||||
|
||||
# Secure map ports to value
|
||||
new_ports = {}
|
||||
for container_port, host_port in value.items():
|
||||
if container_port in self.data.get(ATTR_PORTS, {}):
|
||||
new_ports[container_port] = host_port
|
||||
|
||||
self.persist[ATTR_NETWORK] = new_ports
|
||||
|
||||
@property
|
||||
def ingress_url(self) -> Optional[str]:
|
||||
"""Return URL to ingress url."""
|
||||
if not self.with_ingress:
|
||||
return None
|
||||
|
||||
url = f"/api/hassio_ingress/{self.ingress_token}/"
|
||||
if ATTR_INGRESS_ENTRY in self.data:
|
||||
return f"{url}{self.data[ATTR_INGRESS_ENTRY]}"
|
||||
return url
|
||||
|
||||
@property
|
||||
def webui(self) -> Optional[str]:
|
||||
"""Return URL to webui or None."""
|
||||
url = super().webui
|
||||
if not url:
|
||||
return None
|
||||
webui = RE_WEBUI.match(url)
|
||||
|
||||
# extract arguments
|
||||
t_port = webui.group("t_port")
|
||||
t_proto = webui.group("t_proto")
|
||||
s_prefix = webui.group("s_prefix") or ""
|
||||
s_suffix = webui.group("s_suffix") or ""
|
||||
|
||||
# search host port for this docker port
|
||||
if self.ports is None:
|
||||
port = t_port
|
||||
else:
|
||||
port = self.ports.get(f"{t_port}/tcp", t_port)
|
||||
|
||||
# for interface config or port lists
|
||||
if isinstance(port, (tuple, list)):
|
||||
port = port[-1]
|
||||
|
||||
# lookup the correct protocol from config
|
||||
if t_proto:
|
||||
proto = "https" if self.options.get(t_proto) else "http"
|
||||
else:
|
||||
proto = s_prefix
|
||||
|
||||
return f"{proto}://[HOST]:{port}{s_suffix}"
|
||||
|
||||
@property
|
||||
def ingress_port(self) -> Optional[int]:
|
||||
"""Return Ingress port."""
|
||||
if not self.with_ingress:
|
||||
return None
|
||||
|
||||
port = self.data[ATTR_INGRESS_PORT]
|
||||
if port == 0:
|
||||
return self.sys_ingress.get_dynamic_port(self.slug)
|
||||
return port
|
||||
|
||||
@property
|
||||
def ingress_panel(self) -> Optional[bool]:
|
||||
"""Return True if the add-on access support ingress."""
|
||||
return self.persist[ATTR_INGRESS_PANEL]
|
||||
|
||||
@ingress_panel.setter
|
||||
def ingress_panel(self, value: bool):
|
||||
"""Return True if the add-on access support ingress."""
|
||||
self.persist[ATTR_INGRESS_PANEL] = value
|
||||
|
||||
@property
|
||||
def audio_output(self) -> Optional[str]:
|
||||
"""Return a pulse profile for output or None."""
|
||||
if not self.with_audio:
|
||||
return None
|
||||
|
||||
# Fallback with old audio settings
|
||||
# Remove after 210
|
||||
output_data = self.persist.get(ATTR_AUDIO_OUTPUT)
|
||||
if output_data and RE_OLD_AUDIO.fullmatch(output_data):
|
||||
return None
|
||||
return output_data
|
||||
|
||||
@audio_output.setter
|
||||
def audio_output(self, value: Optional[str]):
|
||||
"""Set audio output profile settings."""
|
||||
self.persist[ATTR_AUDIO_OUTPUT] = value
|
||||
|
||||
@property
|
||||
def audio_input(self) -> Optional[str]:
|
||||
"""Return pulse profile for input or None."""
|
||||
if not self.with_audio:
|
||||
return None
|
||||
|
||||
# Fallback with old audio settings
|
||||
# Remove after 210
|
||||
input_data = self.persist.get(ATTR_AUDIO_INPUT)
|
||||
if input_data and RE_OLD_AUDIO.fullmatch(input_data):
|
||||
return None
|
||||
return input_data
|
||||
|
||||
@audio_input.setter
|
||||
def audio_input(self, value: Optional[str]):
|
||||
"""Set audio input settings."""
|
||||
self.persist[ATTR_AUDIO_INPUT] = value
|
||||
|
||||
@property
|
||||
def image(self):
|
||||
"""Return image name of add-on."""
|
||||
return self.persist.get(ATTR_IMAGE)
|
||||
|
||||
@property
|
||||
def need_build(self):
|
||||
"""Return True if this add-on need a local build."""
|
||||
return ATTR_IMAGE not in self.data
|
||||
|
||||
@property
|
||||
def path_data(self):
|
||||
"""Return add-on data path inside Supervisor."""
|
||||
return Path(self.sys_config.path_addons_data, self.slug)
|
||||
|
||||
@property
|
||||
def path_extern_data(self):
|
||||
"""Return add-on data path external for Docker."""
|
||||
return PurePath(self.sys_config.path_extern_addons_data, self.slug)
|
||||
|
||||
@property
|
||||
def path_options(self):
|
||||
"""Return path to add-on options."""
|
||||
return Path(self.path_data, "options.json")
|
||||
|
||||
@property
|
||||
def path_pulse(self):
|
||||
"""Return path to asound config."""
|
||||
return Path(self.sys_config.path_tmp, f"{self.slug}_pulse")
|
||||
|
||||
@property
|
||||
def path_extern_pulse(self):
|
||||
"""Return path to asound config for Docker."""
|
||||
return Path(self.sys_config.path_extern_tmp, f"{self.slug}_pulse")
|
||||
|
||||
def save_persist(self):
|
||||
"""Save data of add-on."""
|
||||
self.sys_addons.data.save_data()
|
||||
|
||||
async def write_options(self):
|
||||
"""Return True if add-on options is written to data."""
|
||||
schema = self.schema
|
||||
options = self.options
|
||||
|
||||
# Update secrets for validation
|
||||
await self.sys_secrets.reload()
|
||||
|
||||
try:
|
||||
options = schema(options)
|
||||
write_json_file(self.path_options, options)
|
||||
except vol.Invalid as ex:
|
||||
_LOGGER.error(
|
||||
"Add-on %s have wrong options: %s",
|
||||
self.slug,
|
||||
humanize_error(options, ex),
|
||||
)
|
||||
except JsonFileError:
|
||||
_LOGGER.error("Add-on %s can't write options", self.slug)
|
||||
else:
|
||||
_LOGGER.debug("Add-on %s write options: %s", self.slug, options)
|
||||
return
|
||||
|
||||
raise AddonsError()
|
||||
|
||||
async def remove_data(self):
|
||||
"""Remove add-on data."""
|
||||
if not self.path_data.is_dir():
|
||||
return
|
||||
|
||||
_LOGGER.info("Remove add-on data folder %s", self.path_data)
|
||||
await remove_data(self.path_data)
|
||||
|
||||
def write_pulse(self):
|
||||
"""Write asound config to file and return True on success."""
|
||||
pulse_config = self.sys_audio.pulse_client(
|
||||
input_profile=self.audio_input, output_profile=self.audio_output
|
||||
)
|
||||
|
||||
# Cleanup wrong maps
|
||||
if self.path_pulse.is_dir():
|
||||
shutil.rmtree(self.path_pulse, ignore_errors=True)
|
||||
|
||||
# Write pulse config
|
||||
try:
|
||||
with self.path_pulse.open("w") as config_file:
|
||||
config_file.write(pulse_config)
|
||||
except OSError as err:
|
||||
_LOGGER.error(
|
||||
"Add-on %s can't write pulse/client.config: %s", self.slug, err
|
||||
)
|
||||
else:
|
||||
_LOGGER.debug(
|
||||
"Add-on %s write pulse/client.config: %s", self.slug, self.path_pulse
|
||||
)
|
||||
|
||||
async def install_apparmor(self) -> None:
|
||||
"""Install or Update AppArmor profile for Add-on."""
|
||||
exists_local = self.sys_host.apparmor.exists(self.slug)
|
||||
exists_addon = self.path_apparmor.exists()
|
||||
|
||||
# Nothing to do
|
||||
if not exists_local and not exists_addon:
|
||||
return
|
||||
|
||||
# Need removed
|
||||
if exists_local and not exists_addon:
|
||||
await self.sys_host.apparmor.remove_profile(self.slug)
|
||||
return
|
||||
|
||||
# Need install/update
|
||||
with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder:
|
||||
profile_file = Path(tmp_folder, "apparmor.txt")
|
||||
|
||||
adjust_profile(self.slug, self.path_apparmor, profile_file)
|
||||
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
|
||||
|
||||
async def uninstall_apparmor(self) -> None:
|
||||
"""Remove AppArmor profile for Add-on."""
|
||||
if not self.sys_host.apparmor.exists(self.slug):
|
||||
return
|
||||
await self.sys_host.apparmor.remove_profile(self.slug)
|
||||
|
||||
def test_update_schema(self) -> bool:
|
||||
"""Check if the existing configuration is valid after update."""
|
||||
# load next schema
|
||||
new_raw_schema = self.data_store[ATTR_SCHEMA]
|
||||
default_options = self.data_store[ATTR_OPTIONS]
|
||||
|
||||
# if disabled
|
||||
if isinstance(new_raw_schema, bool):
|
||||
return True
|
||||
|
||||
# merge options
|
||||
options = {**self.persist[ATTR_OPTIONS], **default_options}
|
||||
|
||||
# create voluptuous
|
||||
new_schema = vol.Schema(
|
||||
vol.All(dict, validate_options(self.coresys, new_raw_schema))
|
||||
)
|
||||
|
||||
# validate
|
||||
try:
|
||||
new_schema(options)
|
||||
except vol.Invalid:
|
||||
_LOGGER.warning("Add-on %s new schema is not compatible", self.slug)
|
||||
return False
|
||||
return True
|
||||
|
||||
async def state(self) -> str:
|
||||
"""Return running state of add-on."""
|
||||
if await self.instance.is_running():
|
||||
return STATE_STARTED
|
||||
return STATE_STOPPED
|
||||
|
||||
async def start(self) -> None:
|
||||
"""Set options and start add-on."""
|
||||
if await self.instance.is_running():
|
||||
_LOGGER.warning("%s already running!", self.slug)
|
||||
return
|
||||
|
||||
# Access Token
|
||||
self.persist[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
|
||||
self.save_persist()
|
||||
|
||||
# Options
|
||||
await self.write_options()
|
||||
|
||||
# Sound
|
||||
if self.with_audio:
|
||||
self.write_pulse()
|
||||
|
||||
# Start Add-on
|
||||
try:
|
||||
await self.instance.run()
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
async def stop(self) -> None:
|
||||
"""Stop add-on."""
|
||||
try:
|
||||
return await self.instance.stop()
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
async def restart(self) -> None:
|
||||
"""Restart add-on."""
|
||||
with suppress(AddonsError):
|
||||
await self.stop()
|
||||
await self.start()
|
||||
|
||||
def logs(self) -> Awaitable[bytes]:
|
||||
"""Return add-ons log output.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.logs()
|
||||
|
||||
async def stats(self) -> DockerStats:
|
||||
"""Return stats of container."""
|
||||
try:
|
||||
return await self.instance.stats()
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
async def write_stdin(self, data):
|
||||
"""Write data to add-on stdin.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
if not self.with_stdin:
|
||||
_LOGGER.error("Add-on don't support write to stdin!")
|
||||
raise AddonsNotSupportedError()
|
||||
|
||||
try:
|
||||
return await self.instance.write_stdin(data)
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
async def snapshot(self, tar_file: tarfile.TarFile) -> None:
|
||||
"""Snapshot state of an add-on."""
|
||||
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
|
||||
# store local image
|
||||
if self.need_build:
|
||||
try:
|
||||
await self.instance.export_image(Path(temp, "image.tar"))
|
||||
except DockerAPIError:
|
||||
raise AddonsError() from None
|
||||
|
||||
data = {
|
||||
ATTR_USER: self.persist,
|
||||
ATTR_SYSTEM: self.data,
|
||||
ATTR_VERSION: self.version,
|
||||
ATTR_STATE: await self.state(),
|
||||
}
|
||||
|
||||
# Store local configs/state
|
||||
try:
|
||||
write_json_file(Path(temp, "addon.json"), data)
|
||||
except JsonFileError:
|
||||
_LOGGER.error("Can't save meta for %s", self.slug)
|
||||
raise AddonsError() from None
|
||||
|
||||
# Store AppArmor Profile
|
||||
if self.sys_host.apparmor.exists(self.slug):
|
||||
profile = Path(temp, "apparmor.txt")
|
||||
try:
|
||||
self.sys_host.apparmor.backup_profile(self.slug, profile)
|
||||
except HostAppArmorError:
|
||||
_LOGGER.error("Can't backup AppArmor profile")
|
||||
raise AddonsError() from None
|
||||
|
||||
# write into tarfile
|
||||
def _write_tarfile():
|
||||
"""Write tar inside loop."""
|
||||
with tar_file as snapshot:
|
||||
# Snapshot system
|
||||
snapshot.add(temp, arcname=".")
|
||||
|
||||
# Snapshot data
|
||||
snapshot.add(
|
||||
self.path_data,
|
||||
arcname="data",
|
||||
filter=exclude_filter(self.snapshot_exclude),
|
||||
)
|
||||
|
||||
try:
|
||||
_LOGGER.info("Build snapshot for add-on %s", self.slug)
|
||||
await self.sys_run_in_executor(_write_tarfile)
|
||||
except (tarfile.TarError, OSError) as err:
|
||||
_LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
|
||||
raise AddonsError() from None
|
||||
|
||||
_LOGGER.info("Finish snapshot for addon %s", self.slug)
|
||||
|
||||
async def restore(self, tar_file: tarfile.TarFile) -> None:
|
||||
"""Restore state of an add-on."""
|
||||
with TemporaryDirectory(dir=self.sys_config.path_tmp) as temp:
|
||||
# extract snapshot
|
||||
def _extract_tarfile():
|
||||
"""Extract tar snapshot."""
|
||||
with tar_file as snapshot:
|
||||
snapshot.extractall(path=Path(temp), members=secure_path(snapshot))
|
||||
|
||||
try:
|
||||
await self.sys_run_in_executor(_extract_tarfile)
|
||||
except tarfile.TarError as err:
|
||||
_LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
|
||||
raise AddonsError() from None
|
||||
|
||||
# Read snapshot data
|
||||
try:
|
||||
data = read_json_file(Path(temp, "addon.json"))
|
||||
except JsonFileError:
|
||||
raise AddonsError() from None
|
||||
|
||||
# Validate
|
||||
try:
|
||||
data = SCHEMA_ADDON_SNAPSHOT(data)
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error(
|
||||
"Can't validate %s, snapshot data: %s",
|
||||
self.slug,
|
||||
humanize_error(data, err),
|
||||
)
|
||||
raise AddonsError() from None
|
||||
|
||||
# If available
|
||||
if not self._available(data[ATTR_SYSTEM]):
|
||||
_LOGGER.error("Add-on %s is not available for this Platform", self.slug)
|
||||
raise AddonsNotSupportedError()
|
||||
|
||||
# Restore local add-on information
|
||||
_LOGGER.info("Restore config for addon %s", self.slug)
|
||||
restore_image = self._image(data[ATTR_SYSTEM])
|
||||
self.sys_addons.data.restore(
|
||||
self.slug, data[ATTR_USER], data[ATTR_SYSTEM], restore_image
|
||||
)
|
||||
|
||||
# Check version / restore image
|
||||
version = data[ATTR_VERSION]
|
||||
if not await self.instance.exists():
|
||||
_LOGGER.info("Restore/Install image for addon %s", self.slug)
|
||||
|
||||
image_file = Path(temp, "image.tar")
|
||||
if image_file.is_file():
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.import_image(image_file)
|
||||
else:
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.install(version, restore_image)
|
||||
await self.instance.cleanup()
|
||||
elif self.instance.version != version or self.legacy:
|
||||
_LOGGER.info("Restore/Update image for addon %s", self.slug)
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.update(version, restore_image)
|
||||
else:
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.stop()
|
||||
|
||||
# Restore data
|
||||
def _restore_data():
|
||||
"""Restore data."""
|
||||
shutil.copytree(Path(temp, "data"), self.path_data)
|
||||
|
||||
_LOGGER.info("Restore data for addon %s", self.slug)
|
||||
if self.path_data.is_dir():
|
||||
await remove_data(self.path_data)
|
||||
try:
|
||||
await self.sys_run_in_executor(_restore_data)
|
||||
except shutil.Error as err:
|
||||
_LOGGER.error("Can't restore origin data: %s", err)
|
||||
raise AddonsError() from None
|
||||
|
||||
# Restore AppArmor
|
||||
profile_file = Path(temp, "apparmor.txt")
|
||||
if profile_file.exists():
|
||||
try:
|
||||
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
|
||||
except HostAppArmorError:
|
||||
_LOGGER.error("Can't restore AppArmor profile")
|
||||
raise AddonsError() from None
|
||||
|
||||
# Run add-on
|
||||
if data[ATTR_STATE] == STATE_STARTED:
|
||||
return await self.start()
|
||||
|
||||
_LOGGER.info("Finish restore for add-on %s", self.slug)
|
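A worked example of the webui template handling defined near the top of this file (values are illustrative, and RE_WEBUI is assumed to be in scope):

match = RE_WEBUI.match("[PROTO:ssl]://[HOST]:[PORT:8080]/admin")
assert match.group("t_proto") == "ssl"
assert match.group("t_port") == "8080"
assert match.group("s_suffix") == "/admin"
# With options {"ssl": True} and ports {"8080/tcp": 8443}, the webui property
# above would resolve this template to "https://[HOST]:8443/admin".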
79
supervisor/addons/build.py
Normal file
@@ -0,0 +1,79 @@
|
||||
"""Supervisor add-on build environment."""
|
||||
from __future__ import annotations
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Dict
|
||||
|
||||
from ..const import ATTR_ARGS, ATTR_BUILD_FROM, ATTR_SQUASH, META_ADDON
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..utils.json import JsonConfig
|
||||
from .validate import SCHEMA_BUILD_CONFIG
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import AnyAddon
|
||||
|
||||
|
||||
class AddonBuild(JsonConfig, CoreSysAttributes):
|
||||
"""Handle build options for add-ons."""
|
||||
|
||||
def __init__(self, coresys: CoreSys, addon: AnyAddon) -> None:
|
||||
"""Initialize Supervisor add-on builder."""
|
||||
self.coresys: CoreSys = coresys
|
||||
self.addon = addon
|
||||
|
||||
super().__init__(
|
||||
Path(self.addon.path_location, "build.json"), SCHEMA_BUILD_CONFIG
|
||||
)
|
||||
|
||||
def save_data(self):
|
||||
"""Ignore save function."""
|
||||
raise RuntimeError()
|
||||
|
||||
@property
|
||||
def base_image(self) -> str:
|
||||
"""Base images for this add-on."""
|
||||
return self._data[ATTR_BUILD_FROM].get(
|
||||
self.sys_arch.default, f"homeassistant/{self.sys_arch.default}-base:latest"
|
||||
)
|
||||
|
||||
@property
|
||||
def squash(self) -> bool:
|
||||
"""Return True or False if squash is active."""
|
||||
return self._data[ATTR_SQUASH]
|
||||
|
||||
@property
|
||||
def additional_args(self) -> Dict[str, str]:
|
||||
"""Return additional Docker build arguments."""
|
||||
return self._data[ATTR_ARGS]
|
||||
|
||||
def get_docker_args(self, version):
|
||||
"""Create a dict with Docker build arguments."""
|
||||
args = {
|
||||
"path": str(self.addon.path_location),
|
||||
"tag": f"{self.addon.image}:{version}",
|
||||
"pull": True,
|
||||
"forcerm": True,
|
||||
"squash": self.squash,
|
||||
"labels": {
|
||||
"io.hass.version": version,
|
||||
"io.hass.arch": self.sys_arch.default,
|
||||
"io.hass.type": META_ADDON,
|
||||
"io.hass.name": self._fix_label("name"),
|
||||
"io.hass.description": self._fix_label("description"),
|
||||
},
|
||||
"buildargs": {
|
||||
"BUILD_FROM": self.base_image,
|
||||
"BUILD_VERSION": version,
|
||||
"BUILD_ARCH": self.sys_arch.default,
|
||||
**self.additional_args,
|
||||
},
|
||||
}
|
||||
|
||||
if self.addon.url:
|
||||
args["labels"]["io.hass.url"] = self.addon.url
|
||||
|
||||
return args
|
||||
|
||||
def _fix_label(self, label_name: str) -> str:
|
||||
"""Remove characters they are not supported."""
|
||||
label = getattr(self.addon, label_name, "")
|
||||
return label.replace("'", "")
|
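The dict produced by get_docker_args lines up with docker-py's images.build() keyword arguments. A hedged sketch (docker_client, coresys and addon are assumed to exist, and "1.2.0" is a made-up version tag):

build = AddonBuild(coresys, addon)
args = build.get_docker_args("1.2.0")
image, build_logs = docker_client.images.build(**args)
print(image.tags, args["buildargs"]["BUILD_ARCH"])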
73
supervisor/addons/data.py
Normal file
@@ -0,0 +1,73 @@
|
||||
"""Init file for Supervisor add-on data."""
|
||||
from copy import deepcopy
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from ..const import (
|
||||
ATTR_IMAGE,
|
||||
ATTR_OPTIONS,
|
||||
ATTR_SYSTEM,
|
||||
ATTR_USER,
|
||||
ATTR_VERSION,
|
||||
FILE_HASSIO_ADDONS,
|
||||
)
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from ..utils.json import JsonConfig
|
||||
from ..store.addon import AddonStore
|
||||
from .addon import Addon
|
||||
from .validate import SCHEMA_ADDONS_FILE
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
Config = Dict[str, Any]
|
||||
|
||||
|
||||
class AddonsData(JsonConfig, CoreSysAttributes):
|
||||
"""Hold data for installed Add-ons inside Supervisor."""
|
||||
|
||||
def __init__(self, coresys: CoreSys):
|
||||
"""Initialize data holder."""
|
||||
super().__init__(FILE_HASSIO_ADDONS, SCHEMA_ADDONS_FILE)
|
||||
self.coresys: CoreSys = coresys
|
||||
|
||||
@property
|
||||
def user(self):
|
||||
"""Return local add-on user data."""
|
||||
return self._data[ATTR_USER]
|
||||
|
||||
@property
|
||||
def system(self):
|
||||
"""Return local add-on data."""
|
||||
return self._data[ATTR_SYSTEM]
|
||||
|
||||
def install(self, addon: AddonStore) -> None:
|
||||
"""Set addon as installed."""
|
||||
self.system[addon.slug] = deepcopy(addon.data)
|
||||
self.user[addon.slug] = {
|
||||
ATTR_OPTIONS: {},
|
||||
ATTR_VERSION: addon.version,
|
||||
ATTR_IMAGE: addon.image,
|
||||
}
|
||||
self.save_data()
|
||||
|
||||
def uninstall(self, addon: Addon) -> None:
|
||||
"""Set add-on as uninstalled."""
|
||||
self.system.pop(addon.slug, None)
|
||||
self.user.pop(addon.slug, None)
|
||||
self.save_data()
|
||||
|
||||
def update(self, addon: AddonStore) -> None:
|
||||
"""Update version of add-on."""
|
||||
self.system[addon.slug] = deepcopy(addon.data)
|
||||
self.user[addon.slug].update(
|
||||
{ATTR_VERSION: addon.version, ATTR_IMAGE: addon.image}
|
||||
)
|
||||
self.save_data()
|
||||
|
||||
def restore(self, slug: str, user: Config, system: Config, image: str) -> None:
|
||||
"""Restore data to add-on."""
|
||||
self.user[slug] = deepcopy(user)
|
||||
self.system[slug] = deepcopy(system)
|
||||
|
||||
self.user[slug][ATTR_IMAGE] = image
|
||||
self.save_data()
|
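A short sketch of the bookkeeping flow above (coresys, store_addon and installed_addon are assumed to exist; nothing here touches Docker, only the persisted JSON file):

data = AddonsData(coresys)

data.install(store_addon)              # copy store config into `system`
assert store_addon.slug in data.user   # user entry holds options/version/image
data.update(store_addon)               # refresh version/image after an update
data.uninstall(installed_addon)        # drop both entries and persist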
566
supervisor/addons/model.py
Normal file
@@ -0,0 +1,566 @@
|
||||
"""Init file for Supervisor add-ons."""
|
||||
from pathlib import Path
|
||||
from typing import Any, Awaitable, Dict, List, Optional
|
||||
|
||||
from packaging import version as pkg_version
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ATTR_ADVANCED,
|
||||
ATTR_APPARMOR,
|
||||
ATTR_ARCH,
|
||||
ATTR_AUDIO,
|
||||
ATTR_AUTH_API,
|
||||
ATTR_AUTO_UART,
|
||||
ATTR_BOOT,
|
||||
ATTR_DESCRIPTON,
|
||||
ATTR_DEVICES,
|
||||
ATTR_DEVICETREE,
|
||||
ATTR_DISCOVERY,
|
||||
ATTR_DOCKER_API,
|
||||
ATTR_ENVIRONMENT,
|
||||
ATTR_FULL_ACCESS,
|
||||
ATTR_GPIO,
|
||||
ATTR_HASSIO_API,
|
||||
ATTR_HASSIO_ROLE,
|
||||
ATTR_HOMEASSISTANT,
|
||||
ATTR_HOMEASSISTANT_API,
|
||||
ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC,
|
||||
ATTR_HOST_NETWORK,
|
||||
ATTR_HOST_PID,
|
||||
ATTR_IMAGE,
|
||||
ATTR_INGRESS,
|
||||
ATTR_INIT,
|
||||
ATTR_KERNEL_MODULES,
|
||||
ATTR_LEGACY,
|
||||
ATTR_LOCATON,
|
||||
ATTR_MACHINE,
|
||||
ATTR_MAP,
|
||||
ATTR_NAME,
|
||||
ATTR_OPTIONS,
|
||||
ATTR_PANEL_ADMIN,
|
||||
ATTR_PANEL_ICON,
|
||||
ATTR_PANEL_TITLE,
|
||||
ATTR_PORTS,
|
||||
ATTR_PORTS_DESCRIPTION,
|
||||
ATTR_PRIVILEGED,
|
||||
ATTR_REPOSITORY,
|
||||
ATTR_SCHEMA,
|
||||
ATTR_SERVICES,
|
||||
ATTR_SLUG,
|
||||
ATTR_SNAPSHOT_EXCLUDE,
|
||||
ATTR_STAGE,
|
||||
ATTR_STARTUP,
|
||||
ATTR_STDIN,
|
||||
ATTR_TIMEOUT,
|
||||
ATTR_TMPFS,
|
||||
ATTR_UDEV,
|
||||
ATTR_URL,
|
||||
ATTR_VERSION,
|
||||
ATTR_VIDEO,
|
||||
ATTR_WEBUI,
|
||||
SECURITY_DEFAULT,
|
||||
SECURITY_DISABLE,
|
||||
SECURITY_PROFILE,
|
||||
AddonStages,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from .validate import RE_SERVICE, RE_VOLUME, schema_ui_options, validate_options
|
||||
|
||||
Data = Dict[str, Any]
|
||||
|
||||
|
||||
class AddonModel(CoreSysAttributes):
|
||||
"""Add-on Data layout."""
|
||||
|
||||
slug: str = None
|
||||
|
||||
@property
|
||||
def data(self) -> Data:
|
||||
"""Return Add-on config/data."""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def is_installed(self) -> bool:
|
||||
"""Return True if an add-on is installed."""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def is_detached(self) -> bool:
|
||||
"""Return True if add-on is detached."""
|
||||
raise NotImplementedError()
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return True if this add-on is available on this platform."""
|
||||
return self._available(self.data)
|
||||
|
||||
@property
|
||||
def options(self) -> Dict[str, Any]:
|
||||
"""Return options with local changes."""
|
||||
return self.data[ATTR_OPTIONS]
|
||||
|
||||
@property
|
||||
def boot(self) -> bool:
|
||||
"""Return boot config with prio local settings."""
|
||||
return self.data[ATTR_BOOT]
|
||||
|
||||
@property
|
||||
def auto_update(self) -> Optional[bool]:
|
||||
"""Return if auto update is enable."""
|
||||
return None
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return name of add-on."""
|
||||
return self.data[ATTR_NAME]
|
||||
|
||||
@property
|
||||
def hostname(self) -> str:
|
||||
"""Return slug/id of add-on."""
|
||||
return self.slug.replace("_", "-")
|
||||
|
||||
@property
|
||||
def dns(self) -> List[str]:
|
||||
"""Return list of DNS name for that add-on."""
|
||||
return []
|
||||
|
||||
@property
|
||||
def timeout(self) -> int:
|
||||
"""Return timeout of addon for docker stop."""
|
||||
return self.data[ATTR_TIMEOUT]
|
||||
|
||||
@property
|
||||
def uuid(self) -> Optional[str]:
|
||||
"""Return an API token for this add-on."""
|
||||
return None
|
||||
|
||||
@property
|
||||
def supervisor_token(self) -> Optional[str]:
|
||||
"""Return access token for Supervisor API."""
|
||||
return None
|
||||
|
||||
@property
def ingress_token(self) -> Optional[str]:
    """Return access token for the add-on ingress."""
    return None
|
||||
|
||||
@property
|
||||
def ingress_entry(self) -> Optional[str]:
|
||||
"""Return ingress external URL."""
|
||||
return None
|
||||
|
||||
@property
|
||||
def description(self) -> str:
|
||||
"""Return description of add-on."""
|
||||
return self.data[ATTR_DESCRIPTON]
|
||||
|
||||
@property
def long_description(self) -> Optional[str]:
    """Return README.md as long_description."""
    readme = Path(self.path_location, "README.md")

    # If the readme doesn't exist
    if not readme.exists():
        return None

    # Return the file content
    with readme.open("r") as readme_file:
        return readme_file.read()
|
||||
|
||||
@property
|
||||
def repository(self) -> str:
|
||||
"""Return repository of add-on."""
|
||||
return self.data[ATTR_REPOSITORY]
|
||||
|
||||
@property
|
||||
def latest_version(self) -> str:
|
||||
"""Return latest version of add-on."""
|
||||
return self.data[ATTR_VERSION]
|
||||
|
||||
@property
|
||||
def version(self) -> str:
|
||||
"""Return version of add-on."""
|
||||
return self.data[ATTR_VERSION]
|
||||
|
||||
@property
|
||||
def protected(self) -> bool:
|
||||
"""Return if add-on is in protected mode."""
|
||||
return True
|
||||
|
||||
@property
|
||||
def startup(self) -> Optional[str]:
|
||||
"""Return startup type of add-on."""
|
||||
return self.data.get(ATTR_STARTUP)
|
||||
|
||||
@property
|
||||
def advanced(self) -> bool:
|
||||
"""Return advanced mode of add-on."""
|
||||
return self.data[ATTR_ADVANCED]
|
||||
|
||||
@property
|
||||
def stage(self) -> AddonStages:
|
||||
"""Return stage mode of add-on."""
|
||||
return self.data[ATTR_STAGE]
|
||||
|
||||
@property
|
||||
def services_role(self) -> Dict[str, str]:
|
||||
"""Return dict of services with rights."""
|
||||
services_list = self.data.get(ATTR_SERVICES, [])
|
||||
|
||||
services = {}
|
||||
for data in services_list:
|
||||
service = RE_SERVICE.match(data)
|
||||
services[service.group("service")] = service.group("rights")
|
||||
|
||||
return services
|
||||
|
||||
@property
|
||||
def discovery(self) -> List[str]:
|
||||
"""Return list of discoverable components/platforms."""
|
||||
return self.data.get(ATTR_DISCOVERY, [])
|
||||
|
||||
@property
|
||||
def ports_description(self) -> Optional[Dict[str, str]]:
|
||||
"""Return descriptions of ports."""
|
||||
return self.data.get(ATTR_PORTS_DESCRIPTION)
|
||||
|
||||
@property
|
||||
def ports(self) -> Optional[Dict[str, Optional[int]]]:
|
||||
"""Return ports of add-on."""
|
||||
return self.data.get(ATTR_PORTS)
|
||||
|
||||
@property
def ingress_url(self) -> Optional[str]:
    """Return URL to the ingress entry."""
    return None
|
||||
|
||||
@property
|
||||
def webui(self) -> Optional[str]:
|
||||
"""Return URL to webui or None."""
|
||||
return self.data.get(ATTR_WEBUI)
|
||||
|
||||
@property
|
||||
def ingress_port(self) -> Optional[int]:
|
||||
"""Return Ingress port."""
|
||||
return None
|
||||
|
||||
@property
|
||||
def panel_icon(self) -> str:
|
||||
"""Return panel icon for Ingress frame."""
|
||||
return self.data[ATTR_PANEL_ICON]
|
||||
|
||||
@property
def panel_title(self) -> str:
    """Return panel title for Ingress frame."""
    return self.data.get(ATTR_PANEL_TITLE, self.name)

@property
def panel_admin(self) -> str:
    """Return True if the Ingress panel is shown to admin users only."""
    return self.data[ATTR_PANEL_ADMIN]
|
||||
|
||||
@property
def host_network(self) -> bool:
    """Return True if the add-on runs on the host network."""
    return self.data[ATTR_HOST_NETWORK]

@property
def host_pid(self) -> bool:
    """Return True if the add-on runs in the host PID namespace."""
    return self.data[ATTR_HOST_PID]

@property
def host_ipc(self) -> bool:
    """Return True if the add-on runs in the host IPC namespace."""
    return self.data[ATTR_HOST_IPC]

@property
def host_dbus(self) -> bool:
    """Return True if the add-on has access to the host D-Bus."""
    return self.data[ATTR_HOST_DBUS]
|
||||
|
||||
@property
|
||||
def devices(self) -> Optional[List[str]]:
|
||||
"""Return devices of add-on."""
|
||||
return self.data.get(ATTR_DEVICES, [])
|
||||
|
||||
@property
def auto_uart(self) -> bool:
    """Return True if all UART devices should be mapped."""
    return self.data[ATTR_AUTO_UART]
|
||||
|
||||
@property
|
||||
def tmpfs(self) -> Optional[str]:
|
||||
"""Return tmpfs of add-on."""
|
||||
return self.data.get(ATTR_TMPFS)
|
||||
|
||||
@property
|
||||
def environment(self) -> Optional[Dict[str, str]]:
|
||||
"""Return environment of add-on."""
|
||||
return self.data.get(ATTR_ENVIRONMENT)
|
||||
|
||||
@property
def privileged(self) -> List[str]:
    """Return list of privileges."""
    return self.data.get(ATTR_PRIVILEGED, [])

@property
def apparmor(self) -> str:
    """Return the AppArmor security mode (disable, default or profile)."""
    if not self.data.get(ATTR_APPARMOR):
        return SECURITY_DISABLE
    elif self.sys_host.apparmor.exists(self.slug):
        return SECURITY_PROFILE
    return SECURITY_DEFAULT
|
||||
|
||||
@property
def legacy(self) -> bool:
    """Return True if the add-on doesn't support Home Assistant labels."""
    return self.data[ATTR_LEGACY]

@property
def access_docker_api(self) -> bool:
    """Return True if the add-on needs read-only Docker API access."""
    return self.data[ATTR_DOCKER_API]

@property
def access_hassio_api(self) -> bool:
    """Return True if the add-on has access to the Supervisor RESTful API."""
    return self.data[ATTR_HASSIO_API]

@property
def access_homeassistant_api(self) -> bool:
    """Return True if the add-on has access to the Home Assistant API proxy."""
    return self.data[ATTR_HOMEASSISTANT_API]

@property
def hassio_role(self) -> str:
    """Return Supervisor role for API."""
    return self.data[ATTR_HASSIO_ROLE]

@property
def snapshot_exclude(self) -> List[str]:
    """Return exclude list for snapshots."""
    return self.data.get(ATTR_SNAPSHOT_EXCLUDE, [])

@property
def default_init(self) -> bool:
    """Return True if the add-on has no init of its own."""
    return self.data[ATTR_INIT]

@property
def with_stdin(self) -> bool:
    """Return True if the add-on accepts input via stdin."""
    return self.data[ATTR_STDIN]

@property
def with_ingress(self) -> bool:
    """Return True if the add-on supports ingress."""
    return self.data[ATTR_INGRESS]

@property
def ingress_panel(self) -> Optional[bool]:
    """Return True if the add-on's ingress panel is enabled."""
    return None

@property
def with_gpio(self) -> bool:
    """Return True if the add-on has access to the GPIO interface."""
    return self.data[ATTR_GPIO]

@property
def with_udev(self) -> bool:
    """Return True if the add-on has its own udev."""
    return self.data[ATTR_UDEV]

@property
def with_kernel_modules(self) -> bool:
    """Return True if the add-on has access to kernel modules."""
    return self.data[ATTR_KERNEL_MODULES]

@property
def with_full_access(self) -> bool:
    """Return True if the add-on wants full access to the hardware."""
    return self.data[ATTR_FULL_ACCESS]

@property
def with_devicetree(self) -> bool:
    """Return True if the add-on has read access to the devicetree."""
    return self.data[ATTR_DEVICETREE]

@property
def access_auth_api(self) -> bool:
    """Return True if the add-on has access to the login/auth backend."""
    return self.data[ATTR_AUTH_API]

@property
def with_audio(self) -> bool:
    """Return True if the add-on has access to audio."""
    return self.data[ATTR_AUDIO]

@property
def with_video(self) -> bool:
    """Return True if the add-on has access to video."""
    return self.data[ATTR_VIDEO]

@property
def homeassistant_version(self) -> Optional[str]:
    """Return the minimum Home Assistant version required by the add-on."""
    return self.data.get(ATTR_HOMEASSISTANT)
|
||||
|
||||
@property
|
||||
def url(self) -> Optional[str]:
|
||||
"""Return URL of add-on."""
|
||||
return self.data.get(ATTR_URL)
|
||||
|
||||
@property
|
||||
def with_icon(self) -> bool:
|
||||
"""Return True if an icon exists."""
|
||||
return self.path_icon.exists()
|
||||
|
||||
@property
|
||||
def with_logo(self) -> bool:
|
||||
"""Return True if a logo exists."""
|
||||
return self.path_logo.exists()
|
||||
|
||||
@property
|
||||
def with_changelog(self) -> bool:
|
||||
"""Return True if a changelog exists."""
|
||||
return self.path_changelog.exists()
|
||||
|
||||
@property
|
||||
def with_documentation(self) -> bool:
|
||||
"""Return True if a documentation exists."""
|
||||
return self.path_documentation.exists()
|
||||
|
||||
@property
|
||||
def supported_arch(self) -> List[str]:
|
||||
"""Return list of supported arch."""
|
||||
return self.data[ATTR_ARCH]
|
||||
|
||||
@property
|
||||
def supported_machine(self) -> List[str]:
|
||||
"""Return list of supported machine."""
|
||||
return self.data.get(ATTR_MACHINE, [])
|
||||
|
||||
@property
|
||||
def image(self) -> str:
|
||||
"""Generate image name from data."""
|
||||
return self._image(self.data)
|
||||
|
||||
@property
def need_build(self) -> bool:
    """Return True if this add-on needs a local build."""
    return ATTR_IMAGE not in self.data
|
||||
|
||||
@property
|
||||
def map_volumes(self) -> Dict[str, str]:
|
||||
"""Return a dict of {volume: policy} from add-on."""
|
||||
volumes = {}
|
||||
for volume in self.data[ATTR_MAP]:
|
||||
result = RE_VOLUME.match(volume)
|
||||
volumes[result.group(1)] = result.group(2) or "ro"
|
||||
|
||||
return volumes
|
||||
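As a hedged illustration of how map entries become mount policies, here is a small self-contained sketch using the same RE_VOLUME pattern defined in supervisor/addons/validate.py; the entry list is hypothetical.

import re

RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")

def parse_map(entries):
    """Return {volume: policy}; bare names default to read-only."""
    volumes = {}
    for entry in entries:
        result = RE_VOLUME.match(entry)
        if not result:
            continue  # the add-on schema rejects invalid entries before this point
        volumes[result.group(1)] = result.group(2) or "ro"
    return volumes

print(parse_map(["config:rw", "ssl", "share:ro"]))
# {'config': 'rw', 'ssl': 'ro', 'share': 'ro'}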
|
||||
@property
|
||||
def path_location(self) -> Path:
|
||||
"""Return path to this add-on."""
|
||||
return Path(self.data[ATTR_LOCATON])
|
||||
|
||||
@property
|
||||
def path_icon(self) -> Path:
|
||||
"""Return path to add-on icon."""
|
||||
return Path(self.path_location, "icon.png")
|
||||
|
||||
@property
|
||||
def path_logo(self) -> Path:
|
||||
"""Return path to add-on logo."""
|
||||
return Path(self.path_location, "logo.png")
|
||||
|
||||
@property
|
||||
def path_changelog(self) -> Path:
|
||||
"""Return path to add-on changelog."""
|
||||
return Path(self.path_location, "CHANGELOG.md")
|
||||
|
||||
@property
def path_documentation(self) -> Path:
    """Return path to add-on documentation."""
    return Path(self.path_location, "DOCS.md")
|
||||
|
||||
@property
|
||||
def path_apparmor(self) -> Path:
|
||||
"""Return path to custom AppArmor profile."""
|
||||
return Path(self.path_location, "apparmor.txt")
|
||||
|
||||
@property
|
||||
def schema(self) -> vol.Schema:
|
||||
"""Create a schema for add-on options."""
|
||||
raw_schema = self.data[ATTR_SCHEMA]
|
||||
|
||||
if isinstance(raw_schema, bool):
|
||||
return vol.Schema(dict)
|
||||
return vol.Schema(vol.All(dict, validate_options(self.coresys, raw_schema)))
|
||||
|
||||
@property
|
||||
def schema_ui(self) -> Optional[List[Dict[str, Any]]]:
|
||||
"""Create a UI schema for add-on options."""
|
||||
raw_schema = self.data[ATTR_SCHEMA]
|
||||
|
||||
if isinstance(raw_schema, bool):
|
||||
return None
|
||||
return schema_ui_options(raw_schema)
|
||||
|
||||
def __eq__(self, other):
    """Compare add-on objects."""
    if not isinstance(other, AddonModel):
        return False
    return self.slug == other.slug
|
||||
|
||||
def _available(self, config) -> bool:
|
||||
"""Return True if this add-on is available on this platform."""
|
||||
# Architecture
|
||||
if not self.sys_arch.is_supported(config[ATTR_ARCH]):
|
||||
return False
|
||||
|
||||
# Machine / Hardware
|
||||
machine = config.get(ATTR_MACHINE)
|
||||
if machine and self.sys_machine not in machine:
|
||||
return False
|
||||
|
||||
# Home Assistant
|
||||
version = config.get(ATTR_HOMEASSISTANT) or self.sys_homeassistant.version
|
||||
if pkg_version.parse(self.sys_homeassistant.version) < pkg_version.parse(
|
||||
version
|
||||
):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def _image(self, config) -> str:
|
||||
"""Generate image name from data."""
|
||||
# Repository with Dockerhub images
|
||||
if ATTR_IMAGE in config:
|
||||
arch = self.sys_arch.match(config[ATTR_ARCH])
|
||||
return config[ATTR_IMAGE].format(arch=arch)
|
||||
|
||||
# local build
|
||||
return f"{config[ATTR_REPOSITORY]}/{self.sys_arch.default}-addon-{config[ATTR_SLUG]}"
|
||||
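A brief, hypothetical illustration of the two naming paths above (the image names and slugs are made up):

# Repository add-on with an explicit image template:
#   {"image": "example/{arch}-my-addon", "arch": ["amd64"]}
#   -> "example/amd64-my-addon" after the best-matching arch is substituted.
# Local add-on without an image key (needs a local build):
#   {"repository": "local", "slug": "my_addon"}
#   -> "local/amd64-addon-my_addon" using the system default arch.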
|
||||
def install(self) -> Awaitable[None]:
|
||||
"""Install this add-on."""
|
||||
return self.sys_addons.install(self.slug)
|
||||
|
||||
def uninstall(self) -> Awaitable[None]:
|
||||
"""Uninstall this add-on."""
|
||||
return self.sys_addons.uninstall(self.slug)
|
||||
|
||||
def update(self) -> Awaitable[None]:
|
||||
"""Update this add-on."""
|
||||
return self.sys_addons.update(self.slug)
|
||||
|
||||
def rebuild(self) -> Awaitable[None]:
|
||||
"""Rebuild this add-on."""
|
||||
return self.sys_addons.rebuild(self.slug)
|
supervisor/addons/utils.py (new file, 101 lines)
@@ -0,0 +1,101 @@
|
||||
"""Util add-ons functions."""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from ..const import (
|
||||
PRIVILEGED_DAC_READ_SEARCH,
|
||||
PRIVILEGED_NET_ADMIN,
|
||||
PRIVILEGED_SYS_ADMIN,
|
||||
PRIVILEGED_SYS_MODULE,
|
||||
PRIVILEGED_SYS_PTRACE,
|
||||
PRIVILEGED_SYS_RAWIO,
|
||||
ROLE_ADMIN,
|
||||
ROLE_MANAGER,
|
||||
SECURITY_DISABLE,
|
||||
SECURITY_PROFILE,
|
||||
)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .model import AddonModel
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def rating_security(addon: AddonModel) -> int:
    """Return 1-6 for security rating.

    1 = not secure
    6 = highly secure
    """
    rating = 5
|
||||
|
||||
# AppArmor
|
||||
if addon.apparmor == SECURITY_DISABLE:
|
||||
rating += -1
|
||||
elif addon.apparmor == SECURITY_PROFILE:
|
||||
rating += 1
|
||||
|
||||
# Home Assistant Login & Ingress
|
||||
if addon.with_ingress:
|
||||
rating += 2
|
||||
elif addon.access_auth_api:
|
||||
rating += 1
|
||||
|
||||
# Privileged options
|
||||
if any(
|
||||
privilege in addon.privileged
|
||||
for privilege in (
|
||||
PRIVILEGED_NET_ADMIN,
|
||||
PRIVILEGED_SYS_ADMIN,
|
||||
PRIVILEGED_SYS_RAWIO,
|
||||
PRIVILEGED_SYS_PTRACE,
|
||||
PRIVILEGED_SYS_MODULE,
|
||||
PRIVILEGED_DAC_READ_SEARCH,
|
||||
)
|
||||
):
|
||||
rating += -1
|
||||
|
||||
# API Supervisor role
|
||||
if addon.hassio_role == ROLE_MANAGER:
|
||||
rating += -1
|
||||
elif addon.hassio_role == ROLE_ADMIN:
|
||||
rating += -2
|
||||
|
||||
# Not secure Networking
|
||||
if addon.host_network:
|
||||
rating += -1
|
||||
|
||||
# Insecure PID namespace
|
||||
if addon.host_pid:
|
||||
rating += -2
|
||||
|
||||
# Full Access
|
||||
if addon.with_full_access:
|
||||
rating += -2
|
||||
|
||||
# Docker Access
|
||||
if addon.access_docker_api:
|
||||
rating = 1
|
||||
|
||||
return max(min(6, rating), 1)
|
||||
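A worked example (for a hypothetical add-on) of how the tally above lands in the 1-6 band:

# Start at 5, +1 for a custom AppArmor profile, +2 for ingress,
# -1 for NET_ADMIN, -1 for host networking:
rating = 5 + 1 + 2 - 1 - 1
print(max(min(6, rating), 1))  # -> 6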
|
||||
|
||||
async def remove_data(folder: Path) -> None:
    """Remove folder and reset privileged."""
    try:
        # Capture stderr so a failure can be reported below.
        proc = await asyncio.create_subprocess_exec(
            "rm",
            "-rf",
            str(folder),
            stdout=asyncio.subprocess.DEVNULL,
            stderr=asyncio.subprocess.PIPE,
        )

        _, error_msg = await proc.communicate()
    except OSError as err:
        error_msg = str(err)
    else:
        if proc.returncode == 0:
            return

    _LOGGER.error("Can't remove Add-on Data: %s", error_msg)
|
supervisor/addons/validate.py (new file, 574 lines)
@@ -0,0 +1,574 @@
|
||||
"""Validate add-ons options schema."""
|
||||
import logging
|
||||
import re
|
||||
import secrets
|
||||
from typing import Any, Dict, List
|
||||
import uuid
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ARCH_ALL,
|
||||
ATTR_ACCESS_TOKEN,
|
||||
ATTR_ADVANCED,
|
||||
ATTR_APPARMOR,
|
||||
ATTR_ARCH,
|
||||
ATTR_ARGS,
|
||||
ATTR_AUDIO,
|
||||
ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT,
|
||||
ATTR_AUTH_API,
|
||||
ATTR_AUTO_UART,
|
||||
ATTR_AUTO_UPDATE,
|
||||
ATTR_BOOT,
|
||||
ATTR_BUILD_FROM,
|
||||
ATTR_DESCRIPTON,
|
||||
ATTR_DEVICES,
|
||||
ATTR_DEVICETREE,
|
||||
ATTR_DISCOVERY,
|
||||
ATTR_DOCKER_API,
|
||||
ATTR_ENVIRONMENT,
|
||||
ATTR_FULL_ACCESS,
|
||||
ATTR_GPIO,
|
||||
ATTR_HASSIO_API,
|
||||
ATTR_HASSIO_ROLE,
|
||||
ATTR_HOMEASSISTANT,
|
||||
ATTR_HOMEASSISTANT_API,
|
||||
ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC,
|
||||
ATTR_HOST_NETWORK,
|
||||
ATTR_HOST_PID,
|
||||
ATTR_IMAGE,
|
||||
ATTR_INGRESS,
|
||||
ATTR_INGRESS_ENTRY,
|
||||
ATTR_INGRESS_PANEL,
|
||||
ATTR_INGRESS_PORT,
|
||||
ATTR_INGRESS_TOKEN,
|
||||
ATTR_INIT,
|
||||
ATTR_KERNEL_MODULES,
|
||||
ATTR_LEGACY,
|
||||
ATTR_LOCATON,
|
||||
ATTR_MACHINE,
|
||||
ATTR_MAP,
|
||||
ATTR_NAME,
|
||||
ATTR_NETWORK,
|
||||
ATTR_OPTIONS,
|
||||
ATTR_PANEL_ADMIN,
|
||||
ATTR_PANEL_ICON,
|
||||
ATTR_PANEL_TITLE,
|
||||
ATTR_PORTS,
|
||||
ATTR_PORTS_DESCRIPTION,
|
||||
ATTR_PRIVILEGED,
|
||||
ATTR_PROTECTED,
|
||||
ATTR_REPOSITORY,
|
||||
ATTR_SCHEMA,
|
||||
ATTR_SERVICES,
|
||||
ATTR_SLUG,
|
||||
ATTR_SNAPSHOT_EXCLUDE,
|
||||
ATTR_SQUASH,
|
||||
ATTR_STAGE,
|
||||
ATTR_STARTUP,
|
||||
ATTR_STATE,
|
||||
ATTR_STDIN,
|
||||
ATTR_SYSTEM,
|
||||
ATTR_TIMEOUT,
|
||||
ATTR_TMPFS,
|
||||
ATTR_UDEV,
|
||||
ATTR_URL,
|
||||
ATTR_USER,
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
ATTR_VIDEO,
|
||||
ATTR_WEBUI,
|
||||
BOOT_AUTO,
|
||||
BOOT_MANUAL,
|
||||
PRIVILEGED_ALL,
|
||||
ROLE_ALL,
|
||||
ROLE_DEFAULT,
|
||||
STARTUP_ALL,
|
||||
STARTUP_APPLICATION,
|
||||
STARTUP_SERVICES,
|
||||
STATE_STARTED,
|
||||
STATE_STOPPED,
|
||||
AddonStages,
|
||||
)
|
||||
from ..coresys import CoreSys
|
||||
from ..discovery.validate import valid_discovery_service
|
||||
from ..validate import (
|
||||
DOCKER_PORTS,
|
||||
DOCKER_PORTS_DESCRIPTION,
|
||||
network_port,
|
||||
token,
|
||||
uuid_match,
|
||||
)
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
|
||||
RE_SERVICE = re.compile(r"^(?P<service>mqtt|mysql):(?P<rights>provide|want|need)$")
|
||||
|
||||
V_STR = "str"
|
||||
V_INT = "int"
|
||||
V_FLOAT = "float"
|
||||
V_BOOL = "bool"
|
||||
V_PASSWORD = "password"
|
||||
V_EMAIL = "email"
|
||||
V_URL = "url"
|
||||
V_PORT = "port"
|
||||
V_MATCH = "match"
|
||||
V_LIST = "list"
|
||||
|
||||
RE_SCHEMA_ELEMENT = re.compile(
|
||||
r"^(?:"
|
||||
r"|bool|email|url|port"
|
||||
r"|str(?:\((?P<s_min>\d+)?,(?P<s_max>\d+)?\))?"
|
||||
r"|password(?:\((?P<p_min>\d+)?,(?P<p_max>\d+)?\))?"
|
||||
r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
|
||||
r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
|
||||
r"|match\((?P<match>.*)\)"
|
||||
r"|list\((?P<list>.+)\)"
|
||||
r")\??$"
|
||||
)
|
||||
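A quick, self-contained check of which option-type strings the element pattern accepts; the sample strings and the import path are assumptions for illustration.

from supervisor.addons.validate import RE_SCHEMA_ELEMENT  # assumed import path

for element in ("str", "int(1,65535)?", "list(low|high)", "match([a-z]+)", "bogus"):
    print(element, bool(RE_SCHEMA_ELEMENT.match(element)))
# str True, int(1,65535)? True, list(low|high) True, match([a-z]+) True, bogus False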
|
||||
_SCHEMA_LENGTH_PARTS = (
|
||||
"i_min",
|
||||
"i_max",
|
||||
"f_min",
|
||||
"f_max",
|
||||
"s_min",
|
||||
"s_max",
|
||||
"p_min",
|
||||
"p_max",
|
||||
)
|
||||
|
||||
RE_DOCKER_IMAGE = re.compile(r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
|
||||
RE_DOCKER_IMAGE_BUILD = re.compile(
|
||||
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$"
|
||||
)
|
||||
|
||||
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
|
||||
|
||||
|
||||
MACHINE_ALL = [
|
||||
"intel-nuc",
|
||||
"odroid-c2",
|
||||
"odroid-n2",
|
||||
"odroid-xu",
|
||||
"qemuarm-64",
|
||||
"qemuarm",
|
||||
"qemux86-64",
|
||||
"qemux86",
|
||||
"raspberrypi",
|
||||
"raspberrypi2",
|
||||
"raspberrypi3-64",
|
||||
"raspberrypi3",
|
||||
"raspberrypi4-64",
|
||||
"raspberrypi4",
|
||||
"tinker",
|
||||
]
|
||||
|
||||
|
||||
def _simple_startup(value):
|
||||
"""Simple startup schema."""
|
||||
if value == "before":
|
||||
return STARTUP_SERVICES
|
||||
if value == "after":
|
||||
return STARTUP_APPLICATION
|
||||
return value
|
||||
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_ADDON_CONFIG = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_NAME): vol.Coerce(str),
|
||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
||||
vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
|
||||
vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
|
||||
vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
|
||||
vol.Optional(ATTR_URL): vol.Url(),
|
||||
vol.Required(ATTR_STARTUP): vol.All(_simple_startup, vol.In(STARTUP_ALL)),
|
||||
vol.Required(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_INIT, default=True): vol.Boolean(),
|
||||
vol.Optional(ATTR_ADVANCED, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_STAGE, default=AddonStages.STABLE): vol.Coerce(AddonStages),
|
||||
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
|
||||
vol.Optional(ATTR_PORTS_DESCRIPTION): DOCKER_PORTS_DESCRIPTION,
|
||||
vol.Optional(ATTR_WEBUI): vol.Match(
|
||||
r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"
|
||||
),
|
||||
vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_INGRESS_PORT, default=8099): vol.Any(
|
||||
network_port, vol.Equal(0)
|
||||
),
|
||||
vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
|
||||
vol.Optional(ATTR_PANEL_ICON, default="mdi:puzzle"): vol.Coerce(str),
|
||||
vol.Optional(ATTR_PANEL_TITLE): vol.Coerce(str),
|
||||
vol.Optional(ATTR_PANEL_ADMIN, default=True): vol.Boolean(),
|
||||
vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
|
||||
vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_UDEV, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_TMPFS): vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
|
||||
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
|
||||
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
|
||||
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
|
||||
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
|
||||
vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_VIDEO, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
|
||||
vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
|
||||
vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
|
||||
vol.Optional(ATTR_SNAPSHOT_EXCLUDE): [vol.Coerce(str)],
|
||||
vol.Required(ATTR_OPTIONS): dict,
|
||||
vol.Required(ATTR_SCHEMA): vol.Any(
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Coerce(str): vol.Any(
|
||||
SCHEMA_ELEMENT,
|
||||
[
|
||||
vol.Any(
|
||||
SCHEMA_ELEMENT,
|
||||
{
|
||||
vol.Coerce(str): vol.Any(
|
||||
SCHEMA_ELEMENT, [SCHEMA_ELEMENT]
|
||||
)
|
||||
},
|
||||
)
|
||||
],
|
||||
vol.Schema(
|
||||
{vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
|
||||
),
|
||||
)
|
||||
}
|
||||
),
|
||||
False,
|
||||
),
|
||||
vol.Optional(ATTR_IMAGE): vol.Match(RE_DOCKER_IMAGE),
|
||||
vol.Optional(ATTR_TIMEOUT, default=10): vol.All(
|
||||
vol.Coerce(int), vol.Range(min=10, max=300)
|
||||
),
|
||||
},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
||||
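A hypothetical minimal config payload that satisfies the required keys of SCHEMA_ADDON_CONFIG above; all values are made up, and optional keys fall back to their schema defaults.

minimal_config = {
    "name": "Example add-on",
    "version": "1.0.0",
    "slug": "example",
    "description": "Minimal example",
    "arch": ["amd64", "aarch64"],
    "startup": "application",
    "boot": "auto",
    "options": {},
    "schema": {},
}
# SCHEMA_ADDON_CONFIG(minimal_config) would fill in defaults such as init=True,
# stage=stable and ingress=False, and strip unknown keys (extra=vol.REMOVE_EXTRA).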
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_BUILD_CONFIG = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema(
|
||||
{vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD)}
|
||||
),
|
||||
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_ARGS, default=dict): vol.Schema(
|
||||
{vol.Coerce(str): vol.Coerce(str)}
|
||||
),
|
||||
},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_ADDON_USER = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||
vol.Optional(ATTR_IMAGE): vol.Coerce(str),
|
||||
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): uuid_match,
|
||||
vol.Optional(ATTR_ACCESS_TOKEN): token,
|
||||
vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(
|
||||
str
|
||||
),
|
||||
vol.Optional(ATTR_OPTIONS, default=dict): dict,
|
||||
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
|
||||
vol.Optional(ATTR_INGRESS_PANEL, default=False): vol.Boolean(),
|
||||
},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend(
|
||||
{
|
||||
vol.Required(ATTR_LOCATON): vol.Coerce(str),
|
||||
vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
SCHEMA_ADDONS_FILE = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_USER, default=dict): {vol.Coerce(str): SCHEMA_ADDON_USER},
|
||||
vol.Optional(ATTR_SYSTEM, default=dict): {vol.Coerce(str): SCHEMA_ADDON_SYSTEM},
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
SCHEMA_ADDON_SNAPSHOT = vol.Schema(
|
||||
{
|
||||
vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
|
||||
vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
|
||||
vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
|
||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||
},
|
||||
extra=vol.REMOVE_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
def validate_options(coresys: CoreSys, raw_schema: Dict[str, Any]):
|
||||
"""Validate schema."""
|
||||
|
||||
def validate(struct):
|
||||
"""Create schema validator for add-ons options."""
|
||||
options = {}
|
||||
|
||||
# read options
|
||||
for key, value in struct.items():
|
||||
# Ignore unknown options / remove from list
|
||||
if key not in raw_schema:
|
||||
_LOGGER.warning("Unknown options %s", key)
|
||||
continue
|
||||
|
||||
typ = raw_schema[key]
|
||||
try:
|
||||
if isinstance(typ, list):
|
||||
# nested value list
|
||||
options[key] = _nested_validate_list(coresys, typ[0], value, key)
|
||||
elif isinstance(typ, dict):
|
||||
# nested value dict
|
||||
options[key] = _nested_validate_dict(coresys, typ, value, key)
|
||||
else:
|
||||
# normal value
|
||||
options[key] = _single_validate(coresys, typ, value, key)
|
||||
except (IndexError, KeyError):
|
||||
raise vol.Invalid(f"Type error for {key}") from None
|
||||
|
||||
_check_missing_options(raw_schema, options, "root")
|
||||
return options
|
||||
|
||||
return validate
|
||||
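A hedged usage sketch of validate_options: the raw schema and user options are hypothetical, the import path is an assumption, and coresys is only consulted for "!secret ..." lookups, so a bare stub is enough here.

import voluptuous as vol
from types import SimpleNamespace

from supervisor.addons.validate import validate_options  # assumed import path

raw_schema = {"host": "str", "port": "port", "tokens": ["str"]}
# Stubbed coresys: secrets.get() is never hit because no value uses "!secret".
coresys_stub = SimpleNamespace(secrets=SimpleNamespace(get=lambda name: None))

options_schema = vol.Schema(vol.All(dict, validate_options(coresys_stub, raw_schema)))
print(options_schema({"host": "example.local", "port": 8123, "tokens": ["abc"]}))
# {'host': 'example.local', 'port': 8123, 'tokens': ['abc']}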
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
# pylint: disable=inconsistent-return-statements
|
||||
def _single_validate(coresys: CoreSys, typ: str, value: Any, key: str):
|
||||
"""Validate a single element."""
|
||||
# if required argument
|
||||
if value is None:
|
||||
raise vol.Invalid(f"Missing required option '{key}'")
|
||||
|
||||
# Lookup secret
|
||||
if str(value).startswith("!secret "):
|
||||
secret: str = value.partition(" ")[2]
|
||||
value = coresys.secrets.get(secret)
|
||||
if value is None:
|
||||
raise vol.Invalid(f"Unknown secret {secret}")
|
||||
|
||||
# parse extend data from type
|
||||
match = RE_SCHEMA_ELEMENT.match(typ)
|
||||
|
||||
# prepare range
|
||||
range_args = {}
|
||||
for group_name in _SCHEMA_LENGTH_PARTS:
|
||||
group_value = match.group(group_name)
|
||||
if group_value:
|
||||
range_args[group_name[2:]] = float(group_value)
|
||||
|
||||
if typ.startswith(V_STR) or typ.startswith(V_PASSWORD):
|
||||
return vol.All(str(value), vol.Range(**range_args))(value)
|
||||
elif typ.startswith(V_INT):
|
||||
return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
|
||||
elif typ.startswith(V_FLOAT):
|
||||
return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
|
||||
elif typ.startswith(V_BOOL):
|
||||
return vol.Boolean()(value)
|
||||
elif typ.startswith(V_EMAIL):
|
||||
return vol.Email()(value)
|
||||
elif typ.startswith(V_URL):
|
||||
return vol.Url()(value)
|
||||
elif typ.startswith(V_PORT):
|
||||
return network_port(value)
|
||||
elif typ.startswith(V_MATCH):
|
||||
return vol.Match(match.group("match"))(str(value))
|
||||
elif typ.startswith(V_LIST):
|
||||
return vol.In(match.group("list").split("|"))(str(value))
|
||||
|
||||
raise vol.Invalid(f"Fatal error for {key} type {typ}")
|
||||
|
||||
|
||||
def _nested_validate_list(coresys, typ, data_list, key):
|
||||
"""Validate nested items."""
|
||||
options = []
|
||||
|
||||
for element in data_list:
|
||||
# Nested?
|
||||
if isinstance(typ, dict):
|
||||
c_options = _nested_validate_dict(coresys, typ, element, key)
|
||||
options.append(c_options)
|
||||
else:
|
||||
options.append(_single_validate(coresys, typ, element, key))
|
||||
|
||||
return options
|
||||
|
||||
|
||||
def _nested_validate_dict(coresys, typ, data_dict, key):
|
||||
"""Validate nested items."""
|
||||
options = {}
|
||||
|
||||
for c_key, c_value in data_dict.items():
|
||||
# Ignore unknown options / remove from list
|
||||
if c_key not in typ:
|
||||
_LOGGER.warning("Unknown options %s", c_key)
|
||||
continue
|
||||
|
||||
# Nested?
|
||||
if isinstance(typ[c_key], list):
|
||||
options[c_key] = _nested_validate_list(
|
||||
coresys, typ[c_key][0], c_value, c_key
|
||||
)
|
||||
else:
|
||||
options[c_key] = _single_validate(coresys, typ[c_key], c_value, c_key)
|
||||
|
||||
_check_missing_options(typ, options, key)
|
||||
return options
|
||||
|
||||
|
||||
def _check_missing_options(origin, exists, root):
|
||||
"""Check if all options are exists."""
|
||||
missing = set(origin) - set(exists)
|
||||
for miss_opt in missing:
|
||||
if isinstance(origin[miss_opt], str) and origin[miss_opt].endswith("?"):
|
||||
continue
|
||||
raise vol.Invalid(f"Missing option {miss_opt} in {root}")
|
||||
|
||||
|
||||
def schema_ui_options(raw_schema: Dict[str, Any]) -> List[Dict[str, Any]]:
|
||||
"""Generate UI schema."""
|
||||
ui_schema = []
|
||||
|
||||
# read options
|
||||
for key, value in raw_schema.items():
|
||||
if isinstance(value, list):
|
||||
# nested value list
|
||||
_nested_ui_list(ui_schema, value, key)
|
||||
elif isinstance(value, dict):
|
||||
# nested value dict
|
||||
_nested_ui_dict(ui_schema, value, key)
|
||||
else:
|
||||
# normal value
|
||||
_single_ui_option(ui_schema, value, key)
|
||||
|
||||
return ui_schema
|
||||
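A hypothetical input/output pair for schema_ui_options, to show the shape of the generated UI nodes:

raw = {"host": "str", "port": "port?", "mode": "list(low|high)"}
# schema_ui_options(raw) ->
# [
#     {"name": "host", "required": True, "type": "string"},
#     {"name": "port", "optional": True, "type": "integer"},
#     {"name": "mode", "required": True, "type": "select", "options": ["low", "high"]},
# ]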
|
||||
|
||||
def _single_ui_option(
|
||||
ui_schema: List[Dict[str, Any]], value: str, key: str, multiple: bool = False
|
||||
) -> None:
|
||||
"""Validate a single element."""
|
||||
ui_node = {"name": key}
|
||||
|
||||
# If multiple
|
||||
if multiple:
|
||||
ui_node["multiple"] = True
|
||||
|
||||
# Parse extend data from type
|
||||
match = RE_SCHEMA_ELEMENT.match(value)
|
||||
|
||||
# Prepare range
|
||||
for group_name in _SCHEMA_LENGTH_PARTS:
|
||||
group_value = match.group(group_name)
|
||||
if not group_value:
|
||||
continue
|
||||
if group_name[2:] == "min":
|
||||
ui_node["lengthMin"] = float(group_value)
|
||||
elif group_name[2:] == "max":
|
||||
ui_node["lengthMax"] = float(group_value)
|
||||
|
||||
# If required
|
||||
if value.endswith("?"):
|
||||
ui_node["optional"] = True
|
||||
else:
|
||||
ui_node["required"] = True
|
||||
|
||||
# Data types
|
||||
if value.startswith(V_STR):
|
||||
ui_node["type"] = "string"
|
||||
elif value.startswith(V_PASSWORD):
|
||||
ui_node["type"] = "string"
|
||||
ui_node["format"] = "password"
|
||||
elif value.startswith(V_INT):
|
||||
ui_node["type"] = "integer"
|
||||
elif value.startswith(V_FLOAT):
|
||||
ui_node["type"] = "float"
|
||||
elif value.startswith(V_BOOL):
|
||||
ui_node["type"] = "boolean"
|
||||
elif value.startswith(V_EMAIL):
|
||||
ui_node["type"] = "string"
|
||||
ui_node["format"] = "email"
|
||||
elif value.startswith(V_URL):
|
||||
ui_node["type"] = "string"
|
||||
ui_node["format"] = "url"
|
||||
elif value.startswith(V_PORT):
|
||||
ui_node["type"] = "integer"
|
||||
elif value.startswith(V_MATCH):
|
||||
ui_node["type"] = "string"
|
||||
elif value.startswith(V_LIST):
|
||||
ui_node["type"] = "select"
|
||||
ui_node["options"] = match.group("list").split("|")
|
||||
|
||||
ui_schema.append(ui_node)
|
||||
|
||||
|
||||
def _nested_ui_list(
|
||||
ui_schema: List[Dict[str, Any]], option_list: List[Any], key: str
|
||||
) -> None:
|
||||
"""UI nested list items."""
|
||||
try:
|
||||
element = option_list[0]
|
||||
except IndexError:
|
||||
_LOGGER.error("Invalid schema %s", key)
|
||||
return
|
||||
|
||||
if isinstance(element, dict):
|
||||
_nested_ui_dict(ui_schema, element, key, multiple=True)
|
||||
else:
|
||||
_single_ui_option(ui_schema, element, key, multiple=True)
|
||||
|
||||
|
||||
def _nested_ui_dict(
|
||||
ui_schema: List[Dict[str, Any]],
|
||||
option_dict: Dict[str, Any],
|
||||
key: str,
|
||||
multiple: bool = False,
|
||||
) -> None:
|
||||
"""UI nested dict items."""
|
||||
ui_node = {"name": key, "type": "schema", "optional": True, "multiple": multiple}
|
||||
|
||||
nested_schema = []
|
||||
for c_key, c_value in option_dict.items():
|
||||
# Nested?
|
||||
if isinstance(c_value, list):
|
||||
_nested_ui_list(nested_schema, c_value, c_key)
|
||||
else:
|
||||
_single_ui_option(nested_schema, c_value, c_key)
|
||||
|
||||
ui_node["schema"] = nested_schema
|
||||
ui_schema.append(ui_node)
|
supervisor/api/__init__.py (new file, 378 lines)
@@ -0,0 +1,378 @@
|
||||
"""Init file for Supervisor RESTful API."""
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from aiohttp import web
|
||||
|
||||
from ..coresys import CoreSys, CoreSysAttributes
|
||||
from .addons import APIAddons
|
||||
from .audio import APIAudio
|
||||
from .auth import APIAuth
|
||||
from .cli import APICli
|
||||
from .discovery import APIDiscovery
|
||||
from .dns import APICoreDNS
|
||||
from .hardware import APIHardware
|
||||
from .os import APIOS
|
||||
from .homeassistant import APIHomeAssistant
|
||||
from .host import APIHost
|
||||
from .info import APIInfo
|
||||
from .ingress import APIIngress
|
||||
from .proxy import APIProxy
|
||||
from .security import SecurityMiddleware
|
||||
from .services import APIServices
|
||||
from .snapshots import APISnapshots
|
||||
from .supervisor import APISupervisor
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
MAX_CLIENT_SIZE: int = 1024 ** 2 * 16
|
||||
|
||||
|
||||
class RestAPI(CoreSysAttributes):
|
||||
"""Handle RESTful API for Supervisor."""
|
||||
|
||||
def __init__(self, coresys: CoreSys):
|
||||
"""Initialize Docker base wrapper."""
|
||||
self.coresys: CoreSys = coresys
|
||||
self.security: SecurityMiddleware = SecurityMiddleware(coresys)
|
||||
self.webapp: web.Application = web.Application(
|
||||
client_max_size=MAX_CLIENT_SIZE,
|
||||
middlewares=[self.security.token_validation],
|
||||
)
|
||||
|
||||
# service stuff
|
||||
self._runner: web.AppRunner = web.AppRunner(self.webapp)
|
||||
self._site: Optional[web.TCPSite] = None
|
||||
|
||||
async def load(self) -> None:
|
||||
"""Register REST API Calls."""
|
||||
self._register_supervisor()
|
||||
self._register_host()
|
||||
self._register_os()
|
||||
self._register_cli()
|
||||
self._register_hardware()
|
||||
self._register_homeassistant()
|
||||
self._register_proxy()
|
||||
self._register_panel()
|
||||
self._register_addons()
|
||||
self._register_ingress()
|
||||
self._register_snapshots()
|
||||
self._register_discovery()
|
||||
self._register_services()
|
||||
self._register_info()
|
||||
self._register_auth()
|
||||
self._register_dns()
|
||||
self._register_audio()
|
||||
|
||||
def _register_host(self) -> None:
|
||||
"""Register hostcontrol functions."""
|
||||
api_host = APIHost()
|
||||
api_host.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/host/info", api_host.info),
|
||||
web.post("/host/reboot", api_host.reboot),
|
||||
web.post("/host/shutdown", api_host.shutdown),
|
||||
web.post("/host/reload", api_host.reload),
|
||||
web.post("/host/options", api_host.options),
|
||||
web.get("/host/services", api_host.services),
|
||||
web.post("/host/services/{service}/stop", api_host.service_stop),
|
||||
web.post("/host/services/{service}/start", api_host.service_start),
|
||||
web.post("/host/services/{service}/restart", api_host.service_restart),
|
||||
web.post("/host/services/{service}/reload", api_host.service_reload),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_os(self) -> None:
|
||||
"""Register OS functions."""
|
||||
api_os = APIOS()
|
||||
api_os.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/os/info", api_os.info),
|
||||
web.post("/os/update", api_os.update),
|
||||
web.post("/os/config/sync", api_os.config_sync),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_cli(self) -> None:
|
||||
"""Register HA cli functions."""
|
||||
api_cli = APICli()
|
||||
api_cli.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/cli/info", api_cli.info),
|
||||
web.get("/cli/stats", api_cli.stats),
|
||||
web.post("/cli/update", api_cli.update),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_hardware(self) -> None:
|
||||
"""Register hardware functions."""
|
||||
api_hardware = APIHardware()
|
||||
api_hardware.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/hardware/info", api_hardware.info),
|
||||
web.get("/hardware/audio", api_hardware.audio),
|
||||
web.post("/hardware/trigger", api_hardware.trigger),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_info(self) -> None:
|
||||
"""Register info functions."""
|
||||
api_info = APIInfo()
|
||||
api_info.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([web.get("/info", api_info.info)])
|
||||
|
||||
def _register_auth(self) -> None:
|
||||
"""Register auth functions."""
|
||||
api_auth = APIAuth()
|
||||
api_auth.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[web.post("/auth", api_auth.auth), web.post("/auth/reset", api_auth.reset)]
|
||||
)
|
||||
|
||||
def _register_supervisor(self) -> None:
|
||||
"""Register Supervisor functions."""
|
||||
api_supervisor = APISupervisor()
|
||||
api_supervisor.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/supervisor/ping", api_supervisor.ping),
|
||||
web.get("/supervisor/info", api_supervisor.info),
|
||||
web.get("/supervisor/stats", api_supervisor.stats),
|
||||
web.get("/supervisor/logs", api_supervisor.logs),
|
||||
web.post("/supervisor/update", api_supervisor.update),
|
||||
web.post("/supervisor/reload", api_supervisor.reload),
|
||||
web.post("/supervisor/options", api_supervisor.options),
|
||||
web.post("/supervisor/repair", api_supervisor.repair),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_homeassistant(self) -> None:
|
||||
"""Register Home Assistant functions."""
|
||||
api_hass = APIHomeAssistant()
|
||||
api_hass.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/core/info", api_hass.info),
|
||||
web.get("/core/logs", api_hass.logs),
|
||||
web.get("/core/stats", api_hass.stats),
|
||||
web.post("/core/options", api_hass.options),
|
||||
web.post("/core/update", api_hass.update),
|
||||
web.post("/core/restart", api_hass.restart),
|
||||
web.post("/core/stop", api_hass.stop),
|
||||
web.post("/core/start", api_hass.start),
|
||||
web.post("/core/check", api_hass.check),
|
||||
web.post("/core/rebuild", api_hass.rebuild),
|
||||
# Remove with old Supervisor fallback
|
||||
web.get("/homeassistant/info", api_hass.info),
|
||||
web.get("/homeassistant/logs", api_hass.logs),
|
||||
web.get("/homeassistant/stats", api_hass.stats),
|
||||
web.post("/homeassistant/options", api_hass.options),
|
||||
web.post("/homeassistant/update", api_hass.update),
|
||||
web.post("/homeassistant/restart", api_hass.restart),
|
||||
web.post("/homeassistant/stop", api_hass.stop),
|
||||
web.post("/homeassistant/start", api_hass.start),
|
||||
web.post("/homeassistant/check", api_hass.check),
|
||||
web.post("/homeassistant/rebuild", api_hass.rebuild),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_proxy(self) -> None:
|
||||
"""Register Home Assistant API Proxy."""
|
||||
api_proxy = APIProxy()
|
||||
api_proxy.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/core/api/websocket", api_proxy.websocket),
|
||||
web.get("/core/websocket", api_proxy.websocket),
|
||||
web.get("/core/api/stream", api_proxy.stream),
|
||||
web.post("/core/api/{path:.+}", api_proxy.api),
|
||||
web.get("/core/api/{path:.+}", api_proxy.api),
|
||||
web.get("/core/api/", api_proxy.api),
|
||||
# Remove with old Supervisor fallback
|
||||
web.get("/homeassistant/api/websocket", api_proxy.websocket),
|
||||
web.get("/homeassistant/websocket", api_proxy.websocket),
|
||||
web.get("/homeassistant/api/stream", api_proxy.stream),
|
||||
web.post("/homeassistant/api/{path:.+}", api_proxy.api),
|
||||
web.get("/homeassistant/api/{path:.+}", api_proxy.api),
|
||||
web.get("/homeassistant/api/", api_proxy.api),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_addons(self) -> None:
|
||||
"""Register Add-on functions."""
|
||||
api_addons = APIAddons()
|
||||
api_addons.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/addons", api_addons.list),
|
||||
web.post("/addons/reload", api_addons.reload),
|
||||
web.get("/addons/{addon}/info", api_addons.info),
|
||||
web.post("/addons/{addon}/install", api_addons.install),
|
||||
web.post("/addons/{addon}/uninstall", api_addons.uninstall),
|
||||
web.post("/addons/{addon}/start", api_addons.start),
|
||||
web.post("/addons/{addon}/stop", api_addons.stop),
|
||||
web.post("/addons/{addon}/restart", api_addons.restart),
|
||||
web.post("/addons/{addon}/update", api_addons.update),
|
||||
web.post("/addons/{addon}/options", api_addons.options),
|
||||
web.post("/addons/{addon}/rebuild", api_addons.rebuild),
|
||||
web.get("/addons/{addon}/logs", api_addons.logs),
|
||||
web.get("/addons/{addon}/icon", api_addons.icon),
|
||||
web.get("/addons/{addon}/logo", api_addons.logo),
|
||||
web.get("/addons/{addon}/changelog", api_addons.changelog),
|
||||
web.get("/addons/{addon}/documentation", api_addons.documentation),
|
||||
web.post("/addons/{addon}/stdin", api_addons.stdin),
|
||||
web.post("/addons/{addon}/security", api_addons.security),
|
||||
web.get("/addons/{addon}/stats", api_addons.stats),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_ingress(self) -> None:
|
||||
"""Register Ingress functions."""
|
||||
api_ingress = APIIngress()
|
||||
api_ingress.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.post("/ingress/session", api_ingress.create_session),
|
||||
web.get("/ingress/panels", api_ingress.panels),
|
||||
web.view("/ingress/{token}/{path:.*}", api_ingress.handler),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_snapshots(self) -> None:
|
||||
"""Register snapshots functions."""
|
||||
api_snapshots = APISnapshots()
|
||||
api_snapshots.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/snapshots", api_snapshots.list),
|
||||
web.post("/snapshots/reload", api_snapshots.reload),
|
||||
web.post("/snapshots/new/full", api_snapshots.snapshot_full),
|
||||
web.post("/snapshots/new/partial", api_snapshots.snapshot_partial),
|
||||
web.post("/snapshots/new/upload", api_snapshots.upload),
|
||||
web.get("/snapshots/{snapshot}/info", api_snapshots.info),
|
||||
web.post("/snapshots/{snapshot}/remove", api_snapshots.remove),
|
||||
web.post(
|
||||
"/snapshots/{snapshot}/restore/full", api_snapshots.restore_full
|
||||
),
|
||||
web.post(
|
||||
"/snapshots/{snapshot}/restore/partial",
|
||||
api_snapshots.restore_partial,
|
||||
),
|
||||
web.get("/snapshots/{snapshot}/download", api_snapshots.download),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_services(self) -> None:
|
||||
"""Register services functions."""
|
||||
api_services = APIServices()
|
||||
api_services.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/services", api_services.list),
|
||||
web.get("/services/{service}", api_services.get_service),
|
||||
web.post("/services/{service}", api_services.set_service),
|
||||
web.delete("/services/{service}", api_services.del_service),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_discovery(self) -> None:
|
||||
"""Register discovery functions."""
|
||||
api_discovery = APIDiscovery()
|
||||
api_discovery.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/discovery", api_discovery.list),
|
||||
web.get("/discovery/{uuid}", api_discovery.get_discovery),
|
||||
web.delete("/discovery/{uuid}", api_discovery.del_discovery),
|
||||
web.post("/discovery", api_discovery.set_discovery),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_dns(self) -> None:
|
||||
"""Register DNS functions."""
|
||||
api_dns = APICoreDNS()
|
||||
api_dns.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/dns/info", api_dns.info),
|
||||
web.get("/dns/stats", api_dns.stats),
|
||||
web.get("/dns/logs", api_dns.logs),
|
||||
web.post("/dns/update", api_dns.update),
|
||||
web.post("/dns/options", api_dns.options),
|
||||
web.post("/dns/restart", api_dns.restart),
|
||||
web.post("/dns/reset", api_dns.reset),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_audio(self) -> None:
|
||||
"""Register Audio functions."""
|
||||
api_audio = APIAudio()
|
||||
api_audio.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes(
|
||||
[
|
||||
web.get("/audio/info", api_audio.info),
|
||||
web.get("/audio/stats", api_audio.stats),
|
||||
web.get("/audio/logs", api_audio.logs),
|
||||
web.post("/audio/update", api_audio.update),
|
||||
web.post("/audio/restart", api_audio.restart),
|
||||
web.post("/audio/reload", api_audio.reload),
|
||||
web.post("/audio/profile", api_audio.set_profile),
|
||||
web.post("/audio/volume/{source}/application", api_audio.set_volume),
|
||||
web.post("/audio/volume/{source}", api_audio.set_volume),
|
||||
web.post("/audio/mute/{source}/application", api_audio.set_mute),
|
||||
web.post("/audio/mute/{source}", api_audio.set_mute),
|
||||
web.post("/audio/default/{source}", api_audio.set_default),
|
||||
]
|
||||
)
|
||||
|
||||
def _register_panel(self) -> None:
|
||||
"""Register panel for Home Assistant."""
|
||||
panel_dir = Path(__file__).parent.joinpath("panel")
|
||||
self.webapp.add_routes([web.static("/app", panel_dir)])
|
||||
|
||||
async def start(self) -> None:
|
||||
"""Run RESTful API webserver."""
|
||||
await self._runner.setup()
|
||||
self._site = web.TCPSite(
|
||||
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5
|
||||
)
|
||||
|
||||
try:
|
||||
await self._site.start()
|
||||
except OSError as err:
|
||||
_LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s", err)
|
||||
else:
|
||||
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
|
||||
|
||||
async def stop(self) -> None:
|
||||
"""Stop RESTful API webserver."""
|
||||
if not self._site:
|
||||
return
|
||||
|
||||
# Shutdown running API
|
||||
await self._site.stop()
|
||||
await self._runner.cleanup()
|
||||
|
||||
_LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor)
|
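For context, a hedged sketch of how an add-on might call one of the routes registered above; the base URL and token variable (http://supervisor, SUPERVISOR_TOKEN) are assumptions about the add-on runtime, not taken from this file.

import asyncio
import os

import aiohttp


async def ping_supervisor() -> None:
    # The token variable and hostname are assumed; adjust to the actual runtime.
    headers = {"Authorization": f"Bearer {os.environ.get('SUPERVISOR_TOKEN', '')}"}
    async with aiohttp.ClientSession(headers=headers) as session:
        async with session.get("http://supervisor/supervisor/ping") as resp:
            print(resp.status, await resp.json())


asyncio.run(ping_supervisor())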
supervisor/api/addons.py (new file, 460 lines)
@@ -0,0 +1,460 @@
|
||||
"""Init file for Supervisor Home Assistant RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
from typing import Any, Awaitable, Dict, List
|
||||
|
||||
from aiohttp import web
|
||||
import voluptuous as vol
|
||||
|
||||
from ..addons import AnyAddon
|
||||
from ..addons.addon import Addon
|
||||
from ..addons.utils import rating_security
|
||||
from ..const import (
|
||||
ATTR_ADDONS,
|
||||
ATTR_ADVANCED,
|
||||
ATTR_APPARMOR,
|
||||
ATTR_ARCH,
|
||||
ATTR_AUDIO,
|
||||
ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT,
|
||||
ATTR_AUTH_API,
|
||||
ATTR_AUTO_UPDATE,
|
||||
ATTR_AVAILABLE,
|
||||
ATTR_BLK_READ,
|
||||
ATTR_BLK_WRITE,
|
||||
ATTR_BOOT,
|
||||
ATTR_BUILD,
|
||||
ATTR_CHANGELOG,
|
||||
ATTR_CPU_PERCENT,
|
||||
ATTR_DESCRIPTON,
|
||||
ATTR_DETACHED,
|
||||
ATTR_DEVICES,
|
||||
ATTR_DEVICETREE,
|
||||
ATTR_DISCOVERY,
|
||||
ATTR_DNS,
|
||||
ATTR_DOCKER_API,
|
||||
ATTR_DOCUMENTATION,
|
||||
ATTR_FULL_ACCESS,
|
||||
ATTR_GPIO,
|
||||
ATTR_HASSIO_API,
|
||||
ATTR_HASSIO_ROLE,
|
||||
ATTR_HOMEASSISTANT,
|
||||
ATTR_HOMEASSISTANT_API,
|
||||
ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC,
|
||||
ATTR_HOST_NETWORK,
|
||||
ATTR_HOST_PID,
|
||||
ATTR_HOSTNAME,
|
||||
ATTR_ICON,
|
||||
ATTR_INGRESS,
|
||||
ATTR_INGRESS_ENTRY,
|
||||
ATTR_INGRESS_PANEL,
|
||||
ATTR_INGRESS_PORT,
|
||||
ATTR_INGRESS_URL,
|
||||
ATTR_INSTALLED,
|
||||
ATTR_IP_ADDRESS,
|
||||
ATTR_KERNEL_MODULES,
|
||||
ATTR_VERSION_LATEST,
|
||||
ATTR_LOGO,
|
||||
ATTR_LONG_DESCRIPTION,
|
||||
ATTR_MACHINE,
|
||||
ATTR_MAINTAINER,
|
||||
ATTR_MEMORY_LIMIT,
|
||||
ATTR_MEMORY_PERCENT,
|
||||
ATTR_MEMORY_USAGE,
|
||||
ATTR_NAME,
|
||||
ATTR_NETWORK,
|
||||
ATTR_NETWORK_DESCRIPTION,
|
||||
ATTR_NETWORK_RX,
|
||||
ATTR_NETWORK_TX,
|
||||
ATTR_OPTIONS,
|
||||
ATTR_PRIVILEGED,
|
||||
ATTR_PROTECTED,
|
||||
ATTR_RATING,
|
||||
ATTR_REPOSITORIES,
|
||||
ATTR_REPOSITORY,
|
||||
ATTR_SCHEMA,
|
||||
ATTR_SERVICES,
|
||||
ATTR_SLUG,
|
||||
ATTR_SOURCE,
|
||||
ATTR_STAGE,
|
||||
ATTR_STATE,
|
||||
ATTR_STDIN,
|
||||
ATTR_UDEV,
|
||||
ATTR_URL,
|
||||
ATTR_VERSION,
|
||||
ATTR_VIDEO,
|
||||
ATTR_WEBUI,
|
||||
BOOT_AUTO,
|
||||
BOOT_MANUAL,
|
||||
CONTENT_TYPE_BINARY,
|
||||
CONTENT_TYPE_PNG,
|
||||
CONTENT_TYPE_TEXT,
|
||||
REQUEST_FROM,
|
||||
STATE_NONE,
|
||||
)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..docker.stats import DockerStats
|
||||
from ..exceptions import APIError
|
||||
from ..validate import DOCKER_PORTS
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
|
||||
_LOGGER: logging.Logger = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_OPTIONS = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_NETWORK): vol.Maybe(DOCKER_PORTS),
|
||||
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_INGRESS_PANEL): vol.Boolean(),
|
||||
}
|
||||
)
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_SECURITY = vol.Schema({vol.Optional(ATTR_PROTECTED): vol.Boolean()})
|
||||
|
||||
|
||||
class APIAddons(CoreSysAttributes):
|
||||
"""Handle RESTful API for add-on functions."""
|
||||
|
||||
def _extract_addon(
|
||||
self, request: web.Request, check_installed: bool = True
|
||||
) -> AnyAddon:
|
||||
"""Return addon, throw an exception it it doesn't exist."""
|
||||
addon_slug: str = request.match_info.get("addon")
|
||||
|
||||
# Lookup itself
|
||||
if addon_slug == "self":
|
||||
addon = request.get(REQUEST_FROM)
|
||||
if not isinstance(addon, Addon):
|
||||
raise APIError("Self is not an Addon")
|
||||
return addon
|
||||
|
||||
addon = self.sys_addons.get(addon_slug)
|
||||
if not addon:
|
||||
raise APIError("Addon does not exist")
|
||||
|
||||
if check_installed and not addon.is_installed:
|
||||
raise APIError("Addon is not installed")
|
||||
|
||||
return addon
|
||||
|
||||
@api_process
|
||||
async def list(self, request: web.Request) -> Dict[str, Any]:
|
||||
"""Return all add-ons or repositories."""
|
||||
data_addons = []
|
||||
for addon in self.sys_addons.all:
|
||||
data_addons.append(
|
||||
{
|
||||
                    ATTR_NAME: addon.name,
                    ATTR_SLUG: addon.slug,
                    ATTR_DESCRIPTON: addon.description,
                    ATTR_ADVANCED: addon.advanced,
                    ATTR_STAGE: addon.stage,
                    ATTR_VERSION: addon.latest_version,
                    ATTR_INSTALLED: addon.version if addon.is_installed else None,
                    ATTR_AVAILABLE: addon.available,
                    ATTR_DETACHED: addon.is_detached,
                    ATTR_REPOSITORY: addon.repository,
                    ATTR_BUILD: addon.need_build,
                    ATTR_URL: addon.url,
                    ATTR_ICON: addon.with_icon,
                    ATTR_LOGO: addon.with_logo,
                }
            )

        data_repositories = []
        for repository in self.sys_store.all:
            data_repositories.append(
                {
                    ATTR_SLUG: repository.slug,
                    ATTR_NAME: repository.name,
                    ATTR_SOURCE: repository.source,
                    ATTR_URL: repository.url,
                    ATTR_MAINTAINER: repository.maintainer,
                }
            )

        return {ATTR_ADDONS: data_addons, ATTR_REPOSITORIES: data_repositories}

    @api_process
    async def reload(self, request: web.Request) -> None:
        """Reload all add-on data from store."""
        await asyncio.shield(self.sys_store.reload())

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return add-on information."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)

        data = {
            ATTR_NAME: addon.name,
            ATTR_SLUG: addon.slug,
            ATTR_HOSTNAME: addon.hostname,
            ATTR_DNS: addon.dns,
            ATTR_DESCRIPTON: addon.description,
            ATTR_LONG_DESCRIPTION: addon.long_description,
            ATTR_ADVANCED: addon.advanced,
            ATTR_STAGE: addon.stage,
            ATTR_AUTO_UPDATE: None,
            ATTR_REPOSITORY: addon.repository,
            ATTR_VERSION: None,
            ATTR_VERSION_LATEST: addon.latest_version,
            ATTR_PROTECTED: addon.protected,
            ATTR_RATING: rating_security(addon),
            ATTR_BOOT: addon.boot,
            ATTR_OPTIONS: addon.options,
            ATTR_SCHEMA: addon.schema_ui,
            ATTR_ARCH: addon.supported_arch,
            ATTR_MACHINE: addon.supported_machine,
            ATTR_HOMEASSISTANT: addon.homeassistant_version,
            ATTR_URL: addon.url,
            ATTR_STATE: STATE_NONE,
            ATTR_DETACHED: addon.is_detached,
            ATTR_AVAILABLE: addon.available,
            ATTR_BUILD: addon.need_build,
            ATTR_NETWORK: addon.ports,
            ATTR_NETWORK_DESCRIPTION: addon.ports_description,
            ATTR_HOST_NETWORK: addon.host_network,
            ATTR_HOST_PID: addon.host_pid,
            ATTR_HOST_IPC: addon.host_ipc,
            ATTR_HOST_DBUS: addon.host_dbus,
            ATTR_PRIVILEGED: addon.privileged,
            ATTR_FULL_ACCESS: addon.with_full_access,
            ATTR_APPARMOR: addon.apparmor,
            ATTR_DEVICES: _pretty_devices(addon),
            ATTR_ICON: addon.with_icon,
            ATTR_LOGO: addon.with_logo,
            ATTR_CHANGELOG: addon.with_changelog,
            ATTR_DOCUMENTATION: addon.with_documentation,
            ATTR_STDIN: addon.with_stdin,
            ATTR_WEBUI: None,
            ATTR_HASSIO_API: addon.access_hassio_api,
            ATTR_HASSIO_ROLE: addon.hassio_role,
            ATTR_AUTH_API: addon.access_auth_api,
            ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
            ATTR_GPIO: addon.with_gpio,
            ATTR_KERNEL_MODULES: addon.with_kernel_modules,
            ATTR_DEVICETREE: addon.with_devicetree,
            ATTR_UDEV: addon.with_udev,
            ATTR_DOCKER_API: addon.access_docker_api,
            ATTR_VIDEO: addon.with_video,
            ATTR_AUDIO: addon.with_audio,
            ATTR_AUDIO_INPUT: None,
            ATTR_AUDIO_OUTPUT: None,
            ATTR_SERVICES: _pretty_services(addon),
            ATTR_DISCOVERY: addon.discovery,
            ATTR_IP_ADDRESS: None,
            ATTR_INGRESS: addon.with_ingress,
            ATTR_INGRESS_ENTRY: None,
            ATTR_INGRESS_URL: None,
            ATTR_INGRESS_PORT: None,
            ATTR_INGRESS_PANEL: None,
        }

        if addon.is_installed:
            data.update(
                {
                    ATTR_STATE: await addon.state(),
                    ATTR_WEBUI: addon.webui,
                    ATTR_INGRESS_ENTRY: addon.ingress_entry,
                    ATTR_INGRESS_URL: addon.ingress_url,
                    ATTR_INGRESS_PORT: addon.ingress_port,
                    ATTR_INGRESS_PANEL: addon.ingress_panel,
                    ATTR_AUDIO_INPUT: addon.audio_input,
                    ATTR_AUDIO_OUTPUT: addon.audio_output,
                    ATTR_AUTO_UPDATE: addon.auto_update,
                    ATTR_IP_ADDRESS: str(addon.ip_address),
                    ATTR_VERSION: addon.version,
                }
            )

        return data

    @api_process
    async def options(self, request: web.Request) -> None:
        """Store user options for add-on."""
        addon: AnyAddon = self._extract_addon(request)

        # Update secrets for validation
        await self.sys_secrets.reload()

        # Extend schema with add-on specific validation
        addon_schema = SCHEMA_OPTIONS.extend(
            {vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema)}
        )

        # Validate/Process Body
        body = await api_validate(addon_schema, request, origin=[ATTR_OPTIONS])
        if ATTR_OPTIONS in body:
            addon.options = body[ATTR_OPTIONS]
        if ATTR_BOOT in body:
            addon.boot = body[ATTR_BOOT]
        if ATTR_AUTO_UPDATE in body:
            addon.auto_update = body[ATTR_AUTO_UPDATE]
        if ATTR_NETWORK in body:
            addon.ports = body[ATTR_NETWORK]
        if ATTR_AUDIO_INPUT in body:
            addon.audio_input = body[ATTR_AUDIO_INPUT]
        if ATTR_AUDIO_OUTPUT in body:
            addon.audio_output = body[ATTR_AUDIO_OUTPUT]
        if ATTR_INGRESS_PANEL in body:
            addon.ingress_panel = body[ATTR_INGRESS_PANEL]
            await self.sys_ingress.update_hass_panel(addon)

        addon.save_persist()

    @api_process
    async def security(self, request: web.Request) -> None:
        """Store security options for add-on."""
        addon: AnyAddon = self._extract_addon(request)
        body: Dict[str, Any] = await api_validate(SCHEMA_SECURITY, request)

        if ATTR_PROTECTED in body:
            _LOGGER.warning("Protected flag changing for %s!", addon.slug)
            addon.protected = body[ATTR_PROTECTED]

        addon.save_persist()

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        addon: AnyAddon = self._extract_addon(request)
        stats: DockerStats = await addon.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    def install(self, request: web.Request) -> Awaitable[None]:
        """Install add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        return asyncio.shield(addon.install())

    @api_process
    def uninstall(self, request: web.Request) -> Awaitable[None]:
        """Uninstall add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.uninstall())

    @api_process
    def start(self, request: web.Request) -> Awaitable[None]:
        """Start add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.start())

    @api_process
    def stop(self, request: web.Request) -> Awaitable[None]:
        """Stop add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.stop())

    @api_process
    def update(self, request: web.Request) -> Awaitable[None]:
        """Update add-on."""
        addon: AnyAddon = self._extract_addon(request)

        if addon.latest_version == addon.version:
            raise APIError("No update available!")

        return asyncio.shield(addon.update())

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return asyncio.shield(addon.restart())

    @api_process
    def rebuild(self, request: web.Request) -> Awaitable[None]:
        """Rebuild local build add-on."""
        addon: AnyAddon = self._extract_addon(request)
        if not addon.need_build:
            raise APIError("Only local build addons are supported")

        return asyncio.shield(addon.rebuild())

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return logs from add-on."""
        addon: AnyAddon = self._extract_addon(request)
        return addon.logs()

    @api_process_raw(CONTENT_TYPE_PNG)
    async def icon(self, request: web.Request) -> bytes:
        """Return icon from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_icon:
            raise APIError("No icon found!")

        with addon.path_icon.open("rb") as png:
            return png.read()

    @api_process_raw(CONTENT_TYPE_PNG)
    async def logo(self, request: web.Request) -> bytes:
        """Return logo from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_logo:
            raise APIError("No logo found!")

        with addon.path_logo.open("rb") as png:
            return png.read()

    @api_process_raw(CONTENT_TYPE_TEXT)
    async def changelog(self, request: web.Request) -> str:
        """Return changelog from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_changelog:
            raise APIError("No changelog found!")

        with addon.path_changelog.open("r") as changelog:
            return changelog.read()

    @api_process_raw(CONTENT_TYPE_TEXT)
    async def documentation(self, request: web.Request) -> str:
        """Return documentation from add-on."""
        addon: AnyAddon = self._extract_addon(request, check_installed=False)
        if not addon.with_documentation:
            raise APIError("No documentation found!")

        with addon.path_documentation.open("r") as documentation:
            return documentation.read()

    @api_process
    async def stdin(self, request: web.Request) -> None:
        """Write to stdin of add-on."""
        addon: AnyAddon = self._extract_addon(request)
        if not addon.with_stdin:
            raise APIError("STDIN not supported by add-on")

        data = await request.read()
        await asyncio.shield(addon.write_stdin(data))


def _pretty_devices(addon: AnyAddon) -> List[str]:
    """Return a simplified device list."""
    dev_list = addon.devices
    if not dev_list:
        return None
    return [row.split(":")[0] for row in dev_list]


def _pretty_services(addon: AnyAddon) -> List[str]:
    """Return a simplified services role list."""
    services = []
    for name, access in addon.services_role.items():
        services.append(f"{name}:{access}")
    return services
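The options() handler above merges the static SCHEMA_OPTIONS with the add-on's own option schema before validating the request body. A minimal standalone sketch of that voluptuous extend-and-validate pattern, with a hypothetical base schema and a made-up per-add-on schema standing in for the ATTR_* constants and addon.schema:

import voluptuous as vol

# Hypothetical stand-ins; the real keys are ATTR_* constants and the real
# per-add-on schema comes from the add-on's config.
BASE_SCHEMA = vol.Schema({vol.Optional("boot"): vol.In(["auto", "manual"])})
addon_schema = vol.Schema({vol.Required("ssl"): bool})

# Same shape as SCHEMA_OPTIONS.extend(...) above: options may be a dict that
# matches the add-on schema, or None to reset user options.
merged = BASE_SCHEMA.extend({vol.Optional("options"): vol.Any(None, addon_schema)})

print(merged({"boot": "auto", "options": {"ssl": True}}))  # validated body
print(merged({"options": None}))  # None is accepted and clears user options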
170  supervisor/api/audio.py  Normal file
@@ -0,0 +1,170 @@
"""Init file for Supervisor Audio RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

from aiohttp import web
import attr
import voluptuous as vol

from ..const import (
    ATTR_ACTIVE,
    ATTR_APPLICATION,
    ATTR_AUDIO,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_CARD,
    ATTR_CPU_PERCENT,
    ATTR_HOST,
    ATTR_INDEX,
    ATTR_INPUT,
    ATTR_VERSION_LATEST,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_NAME,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_OUTPUT,
    ATTR_VERSION,
    ATTR_VOLUME,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..host.sound import StreamType
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})

SCHEMA_VOLUME = vol.Schema(
    {
        vol.Required(ATTR_INDEX): vol.Coerce(int),
        vol.Required(ATTR_VOLUME): vol.Coerce(float),
    }
)

# pylint: disable=no-value-for-parameter
SCHEMA_MUTE = vol.Schema(
    {
        vol.Required(ATTR_INDEX): vol.Coerce(int),
        vol.Required(ATTR_ACTIVE): vol.Boolean(),
    }
)

SCHEMA_DEFAULT = vol.Schema({vol.Required(ATTR_NAME): vol.Coerce(str)})

SCHEMA_PROFILE = vol.Schema(
    {vol.Required(ATTR_CARD): vol.Coerce(str), vol.Required(ATTR_NAME): vol.Coerce(str)}
)


class APIAudio(CoreSysAttributes):
    """Handle RESTful API for Audio functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return Audio information."""
        return {
            ATTR_VERSION: self.sys_audio.version,
            ATTR_VERSION_LATEST: self.sys_audio.latest_version,
            ATTR_HOST: str(self.sys_docker.network.audio),
            ATTR_AUDIO: {
                ATTR_CARD: [attr.asdict(card) for card in self.sys_host.sound.cards],
                ATTR_INPUT: [
                    attr.asdict(stream) for stream in self.sys_host.sound.inputs
                ],
                ATTR_OUTPUT: [
                    attr.asdict(stream) for stream in self.sys_host.sound.outputs
                ],
                ATTR_APPLICATION: [
                    attr.asdict(stream) for stream in self.sys_host.sound.applications
                ],
            },
        }

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        stats = await self.sys_audio.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update Audio plugin."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_audio.latest_version)

        if version == self.sys_audio.version:
            raise APIError("Version {} is already in use".format(version))
        await asyncio.shield(self.sys_audio.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return Audio Docker logs."""
        return self.sys_audio.logs()

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart Audio plugin."""
        return asyncio.shield(self.sys_audio.restart())

    @api_process
    def reload(self, request: web.Request) -> Awaitable[None]:
        """Reload Audio information."""
        return asyncio.shield(self.sys_host.sound.update())

    @api_process
    async def set_volume(self, request: web.Request) -> None:
        """Set audio volume on stream."""
        source: StreamType = StreamType(request.match_info.get("source"))
        application: bool = request.path.endswith("application")
        body = await api_validate(SCHEMA_VOLUME, request)

        await asyncio.shield(
            self.sys_host.sound.set_volume(
                source, body[ATTR_INDEX], body[ATTR_VOLUME], application
            )
        )

    @api_process
    async def set_mute(self, request: web.Request) -> None:
        """Mute audio volume on stream."""
        source: StreamType = StreamType(request.match_info.get("source"))
        application: bool = request.path.endswith("application")
        body = await api_validate(SCHEMA_MUTE, request)

        await asyncio.shield(
            self.sys_host.sound.set_mute(
                source, body[ATTR_INDEX], body[ATTR_ACTIVE], application
            )
        )

    @api_process
    async def set_default(self, request: web.Request) -> None:
        """Set audio default stream."""
        source: StreamType = StreamType(request.match_info.get("source"))
        body = await api_validate(SCHEMA_DEFAULT, request)

        await asyncio.shield(self.sys_host.sound.set_default(source, body[ATTR_NAME]))

    @api_process
    async def set_profile(self, request: web.Request) -> None:
        """Set audio default sources."""
        body = await api_validate(SCHEMA_PROFILE, request)

        await asyncio.shield(
            self.sys_host.sound.ativate_profile(body[ATTR_CARD], body[ATTR_NAME])
        )
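SCHEMA_VOLUME above relies on vol.Coerce, so index and volume values that arrive as JSON strings still validate. A small standalone check of that behavior, with the ATTR_* keys inlined as plain strings:

import voluptuous as vol

SCHEMA_VOLUME = vol.Schema(
    {vol.Required("index"): vol.Coerce(int), vol.Required("volume"): vol.Coerce(float)}
)

# Both values are coerced to numbers: {'index': 0, 'volume': 0.35}
print(SCHEMA_VOLUME({"index": "0", "volume": "0.35"}))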
88  supervisor/api/auth.py  Normal file
@@ -0,0 +1,88 @@
"""Init file for Supervisor auth/SSO RESTful API."""
import asyncio
import logging
from typing import Dict

from aiohttp import BasicAuth, web
from aiohttp.hdrs import AUTHORIZATION, CONTENT_TYPE, WWW_AUTHENTICATE
from aiohttp.web_exceptions import HTTPUnauthorized
import voluptuous as vol

from ..addons.addon import Addon
from ..const import (
    ATTR_PASSWORD,
    ATTR_USERNAME,
    CONTENT_TYPE_JSON,
    CONTENT_TYPE_URL,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIForbidden
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_PASSWORD_RESET = vol.Schema(
    {
        vol.Required(ATTR_USERNAME): vol.Coerce(str),
        vol.Required(ATTR_PASSWORD): vol.Coerce(str),
    }
)


class APIAuth(CoreSysAttributes):
    """Handle RESTful API for auth functions."""

    def _process_basic(self, request: web.Request, addon: Addon) -> bool:
        """Process login request with basic auth.

        Return a coroutine.
        """
        auth = BasicAuth.decode(request.headers[AUTHORIZATION])
        return self.sys_auth.check_login(addon, auth.login, auth.password)

    def _process_dict(
        self, request: web.Request, addon: Addon, data: Dict[str, str]
    ) -> bool:
        """Process login with dict data.

        Return a coroutine.
        """
        username = data.get("username") or data.get("user")
        password = data.get("password")

        return self.sys_auth.check_login(addon, username, password)

    @api_process
    async def auth(self, request: web.Request) -> bool:
        """Process login request."""
        addon = request[REQUEST_FROM]

        if not addon.access_auth_api:
            raise APIForbidden("Can't use Home Assistant auth!")

        # BasicAuth
        if AUTHORIZATION in request.headers:
            return await self._process_basic(request, addon)

        # Json
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
            data = await request.json()
            return await self._process_dict(request, addon, data)

        # URL encoded
        if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
            data = await request.post()
            return await self._process_dict(request, addon, data)

        raise HTTPUnauthorized(
            headers={WWW_AUTHENTICATE: 'Basic realm="Home Assistant Authentication"'}
        )

    @api_process
    async def reset(self, request: web.Request) -> None:
        """Process reset password request."""
        body: Dict[str, str] = await api_validate(SCHEMA_PASSWORD_RESET, request)
        await asyncio.shield(
            self.sys_auth.change_password(body[ATTR_USERNAME], body[ATTR_PASSWORD])
        )
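_process_basic() above uses aiohttp's BasicAuth helper to pull credentials out of the Authorization header. A standalone sketch of that round trip with made-up credentials:

from aiohttp import BasicAuth

# Build a header value the way a client would, then decode it the way
# _process_basic() does. The credentials are illustrative only.
header_value = BasicAuth("demo-user", "demo-pass").encode()  # "Basic <base64>"
auth = BasicAuth.decode(header_value)
print(auth.login, auth.password)  # demo-user demo-pass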
62  supervisor/api/cli.py  Normal file
@@ -0,0 +1,62 @@
"""Init file for Supervisor HA cli RESTful API."""
import asyncio
import logging
from typing import Any, Dict

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_CPU_PERCENT,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
)
from ..coresys import CoreSysAttributes
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APICli(CoreSysAttributes):
    """Handle RESTful API for HA Cli functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return HA cli information."""
        return {
            ATTR_VERSION: self.sys_cli.version,
            ATTR_VERSION_LATEST: self.sys_cli.latest_version,
        }

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        stats = await self.sys_cli.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update HA CLI."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_cli.latest_version)

        await asyncio.shield(self.sys_cli.update(version))
100  supervisor/api/discovery.py  Normal file
@@ -0,0 +1,100 @@
"""Init file for Supervisor network RESTful API."""
import voluptuous as vol

from .utils import api_process, api_validate
from ..const import (
    ATTR_ADDON,
    ATTR_UUID,
    ATTR_CONFIG,
    ATTR_DISCOVERY,
    ATTR_SERVICE,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError, APIForbidden
from ..discovery.validate import valid_discovery_service


SCHEMA_DISCOVERY = vol.Schema(
    {
        vol.Required(ATTR_SERVICE): valid_discovery_service,
        vol.Optional(ATTR_CONFIG): vol.Maybe(dict),
    }
)


class APIDiscovery(CoreSysAttributes):
    """Handle RESTful API for discovery functions."""

    def _extract_message(self, request):
        """Extract discovery message from URL."""
        message = self.sys_discovery.get(request.match_info.get("uuid"))
        if not message:
            raise APIError("Discovery message not found")
        return message

    def _check_permission_ha(self, request):
        """Check permission for API call / Home Assistant."""
        if request[REQUEST_FROM] != self.sys_homeassistant:
            raise APIForbidden("Only HomeAssistant can use this API!")

    @api_process
    async def list(self, request):
        """Show registered services."""
        self._check_permission_ha(request)

        discovery = []
        for message in self.sys_discovery.list_messages:
            discovery.append(
                {
                    ATTR_ADDON: message.addon,
                    ATTR_SERVICE: message.service,
                    ATTR_UUID: message.uuid,
                    ATTR_CONFIG: message.config,
                }
            )

        return {ATTR_DISCOVERY: discovery}

    @api_process
    async def set_discovery(self, request):
        """Write data into a discovery pipeline."""
        body = await api_validate(SCHEMA_DISCOVERY, request)
        addon = request[REQUEST_FROM]

        # Access?
        if body[ATTR_SERVICE] not in addon.discovery:
            raise APIForbidden("Can't use discovery!")

        # Process discovery message
        message = self.sys_discovery.send(addon, **body)

        return {ATTR_UUID: message.uuid}

    @api_process
    async def get_discovery(self, request):
        """Read data from a discovery message."""
        message = self._extract_message(request)

        # HomeAssistant?
        self._check_permission_ha(request)

        return {
            ATTR_ADDON: message.addon,
            ATTR_SERVICE: message.service,
            ATTR_UUID: message.uuid,
            ATTR_CONFIG: message.config,
        }

    @api_process
    async def del_discovery(self, request):
        """Delete a discovery message."""
        message = self._extract_message(request)
        addon = request[REQUEST_FROM]

        # Permission
        if message.addon != addon.slug:
            raise APIForbidden("Can't remove discovery message")

        self.sys_discovery.remove(message)
        return True
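set_discovery() above expects a service name plus an optional config dict and passes both to sys_discovery.send(). A standalone sketch of the payload shape SCHEMA_DISCOVERY accepts; the service validator is reduced to a fixed stand-in list here, whereas the real one is valid_discovery_service from supervisor.discovery.validate:

import voluptuous as vol

SCHEMA_DISCOVERY = vol.Schema(
    {
        vol.Required("service"): vol.In(["mqtt", "deconz"]),  # stand-in validator
        vol.Optional("config"): vol.Maybe(dict),  # dict payload or None
    }
)

# Example payload an add-on might post; values are illustrative only.
print(SCHEMA_DISCOVERY({"service": "mqtt", "config": {"host": "192.168.1.10", "port": 1883}}))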
102  supervisor/api/dns.py  Normal file
@@ -0,0 +1,102 @@
"""Init file for Supervisor DNS RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_CPU_PERCENT,
    ATTR_HOST,
    ATTR_VERSION_LATEST,
    ATTR_LOCALS,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_SERVERS,
    ATTR_VERSION,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import dns_server_list
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_SERVERS): dns_server_list})

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APICoreDNS(CoreSysAttributes):
    """Handle RESTful API for DNS functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return DNS information."""
        return {
            ATTR_VERSION: self.sys_dns.version,
            ATTR_VERSION_LATEST: self.sys_dns.latest_version,
            ATTR_HOST: str(self.sys_docker.network.dns),
            ATTR_SERVERS: self.sys_dns.servers,
            ATTR_LOCALS: self.sys_host.network.dns_servers,
        }

    @api_process
    async def options(self, request: web.Request) -> None:
        """Set DNS options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_SERVERS in body:
            self.sys_dns.servers = body[ATTR_SERVERS]
            self.sys_create_task(self.sys_dns.restart())

        self.sys_dns.save_data()

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        stats = await self.sys_dns.stats()

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update DNS plugin."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_dns.latest_version)

        if version == self.sys_dns.version:
            raise APIError("Version {} is already in use".format(version))
        await asyncio.shield(self.sys_dns.update(version))

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Awaitable[bytes]:
        """Return DNS Docker logs."""
        return self.sys_dns.logs()

    @api_process
    def restart(self, request: web.Request) -> Awaitable[None]:
        """Restart CoreDNS plugin."""
        return asyncio.shield(self.sys_dns.restart())

    @api_process
    def reset(self, request: web.Request) -> Awaitable[None]:
        """Reset CoreDNS plugin."""
        return asyncio.shield(self.sys_dns.reset())
57  supervisor/api/hardware.py  Normal file
@@ -0,0 +1,57 @@
"""Init file for Supervisor hardware RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

from aiohttp import web

from .utils import api_process
from ..const import (
    ATTR_SERIAL,
    ATTR_DISK,
    ATTR_GPIO,
    ATTR_AUDIO,
    ATTR_INPUT,
    ATTR_OUTPUT,
)
from ..coresys import CoreSysAttributes

_LOGGER: logging.Logger = logging.getLogger(__name__)


class APIHardware(CoreSysAttributes):
    """Handle RESTful API for hardware functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Show hardware info."""
        return {
            ATTR_SERIAL: list(
                self.sys_hardware.serial_devices | self.sys_hardware.serial_by_id
            ),
            ATTR_INPUT: list(self.sys_hardware.input_devices),
            ATTR_DISK: list(self.sys_hardware.disk_devices),
            ATTR_GPIO: list(self.sys_hardware.gpio_devices),
            ATTR_AUDIO: self.sys_hardware.audio_devices,
        }

    @api_process
    async def audio(self, request: web.Request) -> Dict[str, Any]:
        """Show pulse audio profiles."""
        return {
            ATTR_AUDIO: {
                ATTR_INPUT: {
                    profile.name: profile.description
                    for profile in self.sys_host.sound.inputs
                },
                ATTR_OUTPUT: {
                    profile.name: profile.description
                    for profile in self.sys_host.sound.outputs
                },
            }
        }

    @api_process
    def trigger(self, request: web.Request) -> Awaitable[None]:
        """Trigger a udev device reload."""
        return asyncio.shield(self.sys_hardware.udev_trigger())
177  supervisor/api/homeassistant.py  Normal file
@@ -0,0 +1,177 @@
"""Init file for Supervisor Home Assistant RESTful API."""
import asyncio
import logging
from typing import Any, Coroutine, Dict

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_ARCH,
    ATTR_AUDIO_INPUT,
    ATTR_AUDIO_OUTPUT,
    ATTR_BLK_READ,
    ATTR_BLK_WRITE,
    ATTR_BOOT,
    ATTR_CPU_PERCENT,
    ATTR_CUSTOM,
    ATTR_IMAGE,
    ATTR_IP_ADDRESS,
    ATTR_VERSION_LATEST,
    ATTR_MACHINE,
    ATTR_MEMORY_LIMIT,
    ATTR_MEMORY_PERCENT,
    ATTR_MEMORY_USAGE,
    ATTR_NETWORK_RX,
    ATTR_NETWORK_TX,
    ATTR_PORT,
    ATTR_REFRESH_TOKEN,
    ATTR_SSL,
    ATTR_VERSION,
    ATTR_WAIT_BOOT,
    ATTR_WATCHDOG,
    CONTENT_TYPE_BINARY,
)
from ..coresys import CoreSysAttributes
from ..exceptions import APIError
from ..validate import docker_image, network_port
from .utils import api_process, api_process_raw, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

# pylint: disable=no-value-for-parameter
SCHEMA_OPTIONS = vol.Schema(
    {
        vol.Optional(ATTR_BOOT): vol.Boolean(),
        vol.Inclusive(ATTR_IMAGE, "custom_hass"): vol.Maybe(docker_image),
        vol.Inclusive(ATTR_VERSION_LATEST, "custom_hass"): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_PORT): network_port,
        vol.Optional(ATTR_SSL): vol.Boolean(),
        vol.Optional(ATTR_WATCHDOG): vol.Boolean(),
        vol.Optional(ATTR_WAIT_BOOT): vol.All(vol.Coerce(int), vol.Range(min=60)),
        vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_AUDIO_OUTPUT): vol.Maybe(vol.Coerce(str)),
        vol.Optional(ATTR_AUDIO_INPUT): vol.Maybe(vol.Coerce(str)),
    }
)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APIHomeAssistant(CoreSysAttributes):
    """Handle RESTful API for Home Assistant functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return Home Assistant information."""
        return {
            ATTR_VERSION: self.sys_homeassistant.version,
            ATTR_VERSION_LATEST: self.sys_homeassistant.latest_version,
            ATTR_MACHINE: self.sys_homeassistant.machine,
            ATTR_IP_ADDRESS: str(self.sys_homeassistant.ip_address),
            ATTR_ARCH: self.sys_homeassistant.arch,
            ATTR_IMAGE: self.sys_homeassistant.image,
            ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
            ATTR_BOOT: self.sys_homeassistant.boot,
            ATTR_PORT: self.sys_homeassistant.api_port,
            ATTR_SSL: self.sys_homeassistant.api_ssl,
            ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
            ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
            ATTR_AUDIO_INPUT: self.sys_homeassistant.audio_input,
            ATTR_AUDIO_OUTPUT: self.sys_homeassistant.audio_output,
            # Remove end of Q3 2020
            "last_version": self.sys_homeassistant.latest_version,
        }

    @api_process
    async def options(self, request: web.Request) -> None:
        """Set Home Assistant options."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        if ATTR_IMAGE in body and ATTR_VERSION_LATEST in body:
            self.sys_homeassistant.image = body[ATTR_IMAGE]
            self.sys_homeassistant.latest_version = body[ATTR_VERSION_LATEST]

        if ATTR_BOOT in body:
            self.sys_homeassistant.boot = body[ATTR_BOOT]

        if ATTR_PORT in body:
            self.sys_homeassistant.api_port = body[ATTR_PORT]

        if ATTR_SSL in body:
            self.sys_homeassistant.api_ssl = body[ATTR_SSL]

        if ATTR_WATCHDOG in body:
            self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]

        if ATTR_WAIT_BOOT in body:
            self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]

        if ATTR_REFRESH_TOKEN in body:
            self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]

        if ATTR_AUDIO_INPUT in body:
            self.sys_homeassistant.audio_input = body[ATTR_AUDIO_INPUT]

        if ATTR_AUDIO_OUTPUT in body:
            self.sys_homeassistant.audio_output = body[ATTR_AUDIO_OUTPUT]

        self.sys_homeassistant.save_data()

    @api_process
    async def stats(self, request: web.Request) -> Dict[str, Any]:
        """Return resource information."""
        stats = await self.sys_homeassistant.stats()
        if not stats:
            raise APIError("No stats available")

        return {
            ATTR_CPU_PERCENT: stats.cpu_percent,
            ATTR_MEMORY_USAGE: stats.memory_usage,
            ATTR_MEMORY_LIMIT: stats.memory_limit,
            ATTR_MEMORY_PERCENT: stats.memory_percent,
            ATTR_NETWORK_RX: stats.network_rx,
            ATTR_NETWORK_TX: stats.network_tx,
            ATTR_BLK_READ: stats.blk_read,
            ATTR_BLK_WRITE: stats.blk_write,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update Home Assistant."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_homeassistant.latest_version)

        await asyncio.shield(self.sys_homeassistant.update(version))

    @api_process
    def stop(self, request: web.Request) -> Coroutine:
        """Stop Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.stop())

    @api_process
    def start(self, request: web.Request) -> Coroutine:
        """Start Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.start())

    @api_process
    def restart(self, request: web.Request) -> Coroutine:
        """Restart Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.restart())

    @api_process
    def rebuild(self, request: web.Request) -> Coroutine:
        """Rebuild Home Assistant."""
        return asyncio.shield(self.sys_homeassistant.rebuild())

    @api_process_raw(CONTENT_TYPE_BINARY)
    def logs(self, request: web.Request) -> Coroutine:
        """Return Home Assistant Docker logs."""
        return self.sys_homeassistant.logs()

    @api_process
    async def check(self, request: web.Request) -> None:
        """Check configuration of Home Assistant."""
        result = await self.sys_homeassistant.check_config()
        if not result.valid:
            raise APIError(result.log)
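SCHEMA_OPTIONS above ties ATTR_IMAGE and ATTR_VERSION_LATEST together in the vol.Inclusive group "custom_hass", so a custom image can only be set together with its version. A standalone sketch with plain string keys showing one half of the pair being rejected:

import voluptuous as vol

schema = vol.Schema(
    {
        vol.Inclusive("image", "custom_hass"): str,
        vol.Inclusive("version_latest", "custom_hass"): str,
    }
)

print(schema({"image": "example/home-assistant", "version_latest": "0.110.0"}))  # accepted
try:
    schema({"image": "example/home-assistant"})  # partner key missing
except vol.MultipleInvalid as err:
    print("rejected:", err)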
109  supervisor/api/host.py  Normal file
@@ -0,0 +1,109 @@
"""Init file for Supervisor host RESTful API."""
import asyncio
import logging

import voluptuous as vol

from .utils import api_process, api_validate
from ..const import (
    ATTR_HOSTNAME,
    ATTR_FEATURES,
    ATTR_KERNEL,
    ATTR_OPERATING_SYSTEM,
    ATTR_CHASSIS,
    ATTR_DEPLOYMENT,
    ATTR_STATE,
    ATTR_NAME,
    ATTR_DESCRIPTON,
    ATTR_SERVICES,
    ATTR_CPE,
)
from ..coresys import CoreSysAttributes

_LOGGER: logging.Logger = logging.getLogger(__name__)

SERVICE = "service"

SCHEMA_OPTIONS = vol.Schema({vol.Optional(ATTR_HOSTNAME): vol.Coerce(str)})


class APIHost(CoreSysAttributes):
    """Handle RESTful API for host functions."""

    @api_process
    async def info(self, request):
        """Return host information."""
        return {
            ATTR_CHASSIS: self.sys_host.info.chassis,
            ATTR_CPE: self.sys_host.info.cpe,
            ATTR_FEATURES: self.sys_host.supperted_features,
            ATTR_HOSTNAME: self.sys_host.info.hostname,
            ATTR_OPERATING_SYSTEM: self.sys_host.info.operating_system,
            ATTR_DEPLOYMENT: self.sys_host.info.deployment,
            ATTR_KERNEL: self.sys_host.info.kernel,
        }

    @api_process
    async def options(self, request):
        """Edit host settings."""
        body = await api_validate(SCHEMA_OPTIONS, request)

        # hostname
        if ATTR_HOSTNAME in body:
            await asyncio.shield(
                self.sys_host.control.set_hostname(body[ATTR_HOSTNAME])
            )

    @api_process
    def reboot(self, request):
        """Reboot host."""
        return asyncio.shield(self.sys_host.control.reboot())

    @api_process
    def shutdown(self, request):
        """Poweroff host."""
        return asyncio.shield(self.sys_host.control.shutdown())

    @api_process
    def reload(self, request):
        """Reload host data."""
        return asyncio.shield(self.sys_host.reload())

    @api_process
    async def services(self, request):
        """Return list of available services."""
        services = []
        for unit in self.sys_host.services:
            services.append(
                {
                    ATTR_NAME: unit.name,
                    ATTR_DESCRIPTON: unit.description,
                    ATTR_STATE: unit.state,
                }
            )

        return {ATTR_SERVICES: services}

    @api_process
    def service_start(self, request):
        """Start a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.start(unit))

    @api_process
    def service_stop(self, request):
        """Stop a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.stop(unit))

    @api_process
    def service_reload(self, request):
        """Reload a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.reload(unit))

    @api_process
    def service_restart(self, request):
        """Restart a service."""
        unit = request.match_info.get(SERVICE)
        return asyncio.shield(self.sys_host.services.restart(unit))
42  supervisor/api/info.py  Normal file
@@ -0,0 +1,42 @@
"""Init file for Supervisor info RESTful API."""
import logging
from typing import Any, Dict

from aiohttp import web

from ..const import (
    ATTR_ARCH,
    ATTR_CHANNEL,
    ATTR_HASSOS,
    ATTR_HOMEASSISTANT,
    ATTR_HOSTNAME,
    ATTR_LOGGING,
    ATTR_MACHINE,
    ATTR_SUPERVISOR,
    ATTR_SUPPORTED_ARCH,
    ATTR_TIMEZONE,
)
from ..coresys import CoreSysAttributes
from .utils import api_process

_LOGGER: logging.Logger = logging.getLogger(__name__)


class APIInfo(CoreSysAttributes):
    """Handle RESTful API for info functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Show system info."""
        return {
            ATTR_SUPERVISOR: self.sys_supervisor.version,
            ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
            ATTR_HASSOS: self.sys_hassos.version,
            ATTR_HOSTNAME: self.sys_host.info.hostname,
            ATTR_MACHINE: self.sys_machine,
            ATTR_ARCH: self.sys_arch.default,
            ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
            ATTR_CHANNEL: self.sys_updater.channel,
            ATTR_LOGGING: self.sys_config.logging,
            ATTR_TIMEZONE: self.sys_timezone,
        }
273  supervisor/api/ingress.py  Normal file
@@ -0,0 +1,273 @@
"""Supervisor Add-on ingress service."""
import asyncio
from ipaddress import ip_address
import logging
from typing import Any, Dict, Union

import aiohttp
from aiohttp import hdrs, web
from aiohttp.web_exceptions import (
    HTTPBadGateway,
    HTTPServiceUnavailable,
    HTTPUnauthorized,
)
from multidict import CIMultiDict, istr

from ..addons.addon import Addon
from ..const import (
    ATTR_ADMIN,
    ATTR_ICON,
    ATTR_SESSION,
    ATTR_TITLE,
    ATTR_PANELS,
    ATTR_ENABLE,
    COOKIE_INGRESS,
    HEADER_TOKEN,
    HEADER_TOKEN_OLD,
    REQUEST_FROM,
)
from ..coresys import CoreSysAttributes
from .utils import api_process

_LOGGER: logging.Logger = logging.getLogger(__name__)


class APIIngress(CoreSysAttributes):
    """Ingress view to handle add-on webui routing."""

    def _extract_addon(self, request: web.Request) -> Addon:
        """Return addon, throw an exception if it doesn't exist."""
        token = request.match_info.get("token")

        # Find correct add-on
        addon = self.sys_ingress.get(token)
        if not addon:
            _LOGGER.warning("Ingress for %s not available", token)
            raise HTTPServiceUnavailable()

        return addon

    def _check_ha_access(self, request: web.Request) -> None:
        if request[REQUEST_FROM] != self.sys_homeassistant:
            _LOGGER.warning("Ingress is only available behind Home Assistant")
            raise HTTPUnauthorized()

    def _create_url(self, addon: Addon, path: str) -> str:
        """Create URL to container."""
        return f"http://{addon.ip_address}:{addon.ingress_port}/{path}"

    @api_process
    async def panels(self, request: web.Request) -> Dict[str, Any]:
        """Create a list of panel data."""
        addons = {}
        for addon in self.sys_ingress.addons:
            addons[addon.slug] = {
                ATTR_TITLE: addon.panel_title,
                ATTR_ICON: addon.panel_icon,
                ATTR_ADMIN: addon.panel_admin,
                ATTR_ENABLE: addon.ingress_panel,
            }

        return {ATTR_PANELS: addons}

    @api_process
    async def create_session(self, request: web.Request) -> Dict[str, Any]:
        """Create a new session."""
        self._check_ha_access(request)

        session = self.sys_ingress.create_session()
        return {ATTR_SESSION: session}

    async def handler(
        self, request: web.Request
    ) -> Union[web.Response, web.StreamResponse, web.WebSocketResponse]:
        """Route data to Supervisor ingress service."""
        self._check_ha_access(request)

        # Check Ingress Session
        session = request.cookies.get(COOKIE_INGRESS)
        if not self.sys_ingress.validate_session(session):
            _LOGGER.warning("No valid ingress session %s", session)
            raise HTTPUnauthorized()

        # Process requests
        addon = self._extract_addon(request)
        path = request.match_info.get("path")
        try:
            # Websocket
            if _is_websocket(request):
                return await self._handle_websocket(request, addon, path)

            # Request
            return await self._handle_request(request, addon, path)

        except aiohttp.ClientError as err:
            _LOGGER.error("Ingress error: %s", err)

        raise HTTPBadGateway() from None

    async def _handle_websocket(
        self, request: web.Request, addon: Addon, path: str
    ) -> web.WebSocketResponse:
        """Ingress route for websocket."""
        if hdrs.SEC_WEBSOCKET_PROTOCOL in request.headers:
            req_protocols = [
                str(proto.strip())
                for proto in request.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
            ]
        else:
            req_protocols = ()

        ws_server = web.WebSocketResponse(
            protocols=req_protocols, autoclose=False, autoping=False
        )
        await ws_server.prepare(request)

        # Preparing
        url = self._create_url(addon, path)
        source_header = _init_header(request, addon)

        # Support GET query
        if request.query_string:
            url = "{}?{}".format(url, request.query_string)

        # Start proxy
        async with self.sys_websession.ws_connect(
            url,
            headers=source_header,
            protocols=req_protocols,
            autoclose=False,
            autoping=False,
        ) as ws_client:
            # Proxy requests
            await asyncio.wait(
                [
                    _websocket_forward(ws_server, ws_client),
                    _websocket_forward(ws_client, ws_server),
                ],
                return_when=asyncio.FIRST_COMPLETED,
            )

        return ws_server

    async def _handle_request(
        self, request: web.Request, addon: Addon, path: str
    ) -> Union[web.Response, web.StreamResponse]:
        """Ingress route for request."""
        url = self._create_url(addon, path)
        data = await request.read()
        source_header = _init_header(request, addon)

        async with self.sys_websession.request(
            request.method,
            url,
            headers=source_header,
            params=request.query,
            allow_redirects=False,
            data=data,
        ) as result:
            headers = _response_header(result)

            # Simple request
            if (
                hdrs.CONTENT_LENGTH in result.headers
                and int(result.headers.get(hdrs.CONTENT_LENGTH, 0)) < 4_194_000
            ):
                # Return Response
                body = await result.read()
                return web.Response(
                    headers=headers,
                    status=result.status,
                    content_type=result.content_type,
                    body=body,
                )

            # Stream response
            response = web.StreamResponse(status=result.status, headers=headers)
            response.content_type = result.content_type

            try:
                await response.prepare(request)
                async for data in result.content.iter_chunked(4096):
                    await response.write(data)

            except (aiohttp.ClientError, aiohttp.ClientPayloadError) as err:
                _LOGGER.error("Stream error with %s: %s", url, err)

            return response


def _init_header(
    request: web.Request, addon: str
) -> Union[CIMultiDict, Dict[str, str]]:
    """Create initial header."""
    headers = {}

    # filter flags
    for name, value in request.headers.items():
        if name in (
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_ENCODING,
            hdrs.SEC_WEBSOCKET_EXTENSIONS,
            hdrs.SEC_WEBSOCKET_PROTOCOL,
            hdrs.SEC_WEBSOCKET_VERSION,
            hdrs.SEC_WEBSOCKET_KEY,
            istr(HEADER_TOKEN),
            istr(HEADER_TOKEN_OLD),
        ):
            continue
        headers[name] = value

    # Update X-Forwarded-For
    forward_for = request.headers.get(hdrs.X_FORWARDED_FOR)
    connected_ip = ip_address(request.transport.get_extra_info("peername")[0])
    headers[hdrs.X_FORWARDED_FOR] = f"{forward_for}, {connected_ip!s}"

    return headers


def _response_header(response: aiohttp.ClientResponse) -> Dict[str, str]:
    """Create response header."""
    headers = {}

    for name, value in response.headers.items():
        if name in (
            hdrs.TRANSFER_ENCODING,
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_TYPE,
            hdrs.CONTENT_ENCODING,
        ):
            continue
        headers[name] = value

    return headers


def _is_websocket(request: web.Request) -> bool:
    """Return True if request is a websocket."""
    headers = request.headers

    if (
        "upgrade" in headers.get(hdrs.CONNECTION, "").lower()
        and headers.get(hdrs.UPGRADE, "").lower() == "websocket"
    ):
        return True
    return False


async def _websocket_forward(ws_from, ws_to):
    """Handle websocket message directly."""
    try:
        async for msg in ws_from:
            if msg.type == aiohttp.WSMsgType.TEXT:
                await ws_to.send_str(msg.data)
            elif msg.type == aiohttp.WSMsgType.BINARY:
                await ws_to.send_bytes(msg.data)
            elif msg.type == aiohttp.WSMsgType.PING:
                await ws_to.ping()
            elif msg.type == aiohttp.WSMsgType.PONG:
                await ws_to.pong()
            elif ws_to.closed:
                await ws_to.close(code=ws_to.close_code, message=msg.extra)
    except RuntimeError:
        _LOGGER.warning("Ingress Websocket runtime error")
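_is_websocket() above decides whether to proxy a request as a websocket purely from the Connection and Upgrade headers. The same test run against a bare CIMultiDict, the mapping type aiohttp exposes as request.headers, with hypothetical header values:

from aiohttp import hdrs
from multidict import CIMultiDict

def is_websocket(headers: CIMultiDict) -> bool:
    # Mirrors the check in _is_websocket(): an Upgrade token in Connection
    # plus an Upgrade header naming the websocket protocol.
    return (
        "upgrade" in headers.get(hdrs.CONNECTION, "").lower()
        and headers.get(hdrs.UPGRADE, "").lower() == "websocket"
    )

print(is_websocket(CIMultiDict({"Connection": "keep-alive, Upgrade", "Upgrade": "websocket"})))  # True
print(is_websocket(CIMultiDict({"Connection": "keep-alive"})))  # False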
47  supervisor/api/os.py  Normal file
@@ -0,0 +1,47 @@
"""Init file for Supervisor HassOS RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

from aiohttp import web
import voluptuous as vol

from ..const import (
    ATTR_BOARD,
    ATTR_BOOT,
    ATTR_VERSION,
    ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from .utils import api_process, api_validate

_LOGGER: logging.Logger = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APIOS(CoreSysAttributes):
    """Handle RESTful API for OS functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return OS information."""
        return {
            ATTR_VERSION: self.sys_hassos.version,
            ATTR_VERSION_LATEST: self.sys_hassos.latest_version,
            ATTR_BOARD: self.sys_hassos.board,
            ATTR_BOOT: self.sys_dbus.rauc.boot_slot,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update OS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_hassos.latest_version)

        await asyncio.shield(self.sys_hassos.update(version))

    @api_process
    def config_sync(self, request: web.Request) -> Awaitable[None]:
        """Trigger config reload on OS."""
        return asyncio.shield(self.sys_hassos.config_sync())
2  supervisor/api/panel/201359fd5a526afe13ef.worker.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/201359fd5a526afe13ef.worker.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/201359fd5a526afe13ef.worker.js.map  Normal file
File diff suppressed because one or more lines are too long
2  supervisor/api/panel/a1ebfa0a88593a3b571c.worker.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/a1ebfa0a88593a3b571c.worker.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/a1ebfa0a88593a3b571c.worker.js.map  Normal file
File diff suppressed because one or more lines are too long
3  supervisor/api/panel/chunk.05d0c315f85f2fc8c34a.js  Normal file
File diff suppressed because one or more lines are too long
10  supervisor/api/panel/chunk.05d0c315f85f2fc8c34a.js.LICENSE  Normal file
@@ -0,0 +1,10 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
BIN  supervisor/api/panel/chunk.05d0c315f85f2fc8c34a.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/chunk.05d0c315f85f2fc8c34a.js.map  Normal file
File diff suppressed because one or more lines are too long
2  supervisor/api/panel/chunk.0b82745c7bdffe5c1404.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/chunk.0b82745c7bdffe5c1404.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/chunk.0b82745c7bdffe5c1404.js.map  Normal file
File diff suppressed because one or more lines are too long
2  supervisor/api/panel/chunk.1a25d23325fed5a4d90b.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/chunk.1a25d23325fed5a4d90b.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/chunk.1a25d23325fed5a4d90b.js.map  Normal file
File diff suppressed because one or more lines are too long
2  supervisor/api/panel/chunk.26756b56961f7bf94974.js  Normal file
@@ -0,0 +1,2 @@
(self.webpackJsonp=self.webpackJsonp||[]).push([[2],{177:function(e,r,n){"use strict";n.r(r),n.d(r,"codeMirror",function(){return c}),n.d(r,"codeMirrorCss",function(){return i});var a=n(54),o=n.n(a),s=n(170),t=(n(171),n(172),n(11));o.a.commands.save=function(e){Object(t.a)(e.getWrapperElement(),"editor-save")};var c=o.a,i=s.a}}]);
//# sourceMappingURL=chunk.26756b56961f7bf94974.js.map
BIN  supervisor/api/panel/chunk.26756b56961f7bf94974.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/chunk.26756b56961f7bf94974.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["webpack:///./src/resources/codemirror.ts"],"names":["__webpack_require__","r","__webpack_exports__","d","codeMirror","codeMirrorCss","codemirror__WEBPACK_IMPORTED_MODULE_0__","codemirror__WEBPACK_IMPORTED_MODULE_0___default","n","codemirror_lib_codemirror_css__WEBPACK_IMPORTED_MODULE_1__","_common_dom_fire_event__WEBPACK_IMPORTED_MODULE_4__","_CodeMirror","commands","save","cm","fireEvent","getWrapperElement","_codeMirrorCss"],"mappings":"sFAAAA,EAAAC,EAAAC,GAAAF,EAAAG,EAAAD,EAAA,+BAAAE,IAAAJ,EAAAG,EAAAD,EAAA,kCAAAG,IAAA,IAAAC,EAAAN,EAAA,IAAAO,EAAAP,EAAAQ,EAAAF,GAAAG,EAAAT,EAAA,KAAAU,GAAAV,EAAA,KAAAA,EAAA,KAAAA,EAAA,KAQAW,IAAYC,SAASC,KAAO,SAACC,GAC3BC,YAAUD,EAAGE,oBAAqB,gBAE7B,IAAMZ,EAAkBO,IAClBN,EAAqBY","file":"chunk.26756b56961f7bf94974.js","sourcesContent":["// @ts-ignore\nimport _CodeMirror, { Editor } from \"codemirror\";\n// @ts-ignore\nimport _codeMirrorCss from \"codemirror/lib/codemirror.css\";\nimport \"codemirror/mode/yaml/yaml\";\nimport \"codemirror/mode/jinja2/jinja2\";\nimport { fireEvent } from \"../common/dom/fire_event\";\n\n_CodeMirror.commands.save = (cm: Editor) => {\n fireEvent(cm.getWrapperElement(), \"editor-save\");\n};\nexport const codeMirror: any = _CodeMirror;\nexport const codeMirrorCss: any = _codeMirrorCss;\n"],"sourceRoot":""}
3  supervisor/api/panel/chunk.26be881fcb628958e718.js  Normal file
File diff suppressed because one or more lines are too long
10  supervisor/api/panel/chunk.26be881fcb628958e718.js.LICENSE  Normal file
@@ -0,0 +1,10 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
BIN  supervisor/api/panel/chunk.26be881fcb628958e718.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/chunk.26be881fcb628958e718.js.map  Normal file
File diff suppressed because one or more lines are too long
2  supervisor/api/panel/chunk.35929da61d769e57c884.js  Normal file
File diff suppressed because one or more lines are too long
BIN  supervisor/api/panel/chunk.35929da61d769e57c884.js.gz  Normal file
Binary file not shown.
1  supervisor/api/panel/chunk.35929da61d769e57c884.js.map  Normal file
@@ -0,0 +1 @@
{"version":3,"sources":["webpack:///./hassio/src/ingress-view/hassio-ingress-view.ts"],"names":["customElement","HassioIngressView","property","this","_addon","html","_templateObject2","name","ingress_url","_templateObject","changedProps","_get","_getPrototypeOf","prototype","call","has","addon","route","path","substr","oldRoute","get","oldAddon","undefined","_fetchData","_callee","addonSlug","_ref","_ref2","regeneratorRuntime","wrap","_context","prev","next","Promise","all","fetchHassioAddonInfo","hass","Error","createHassioSession","sent","_slicedToArray","ingress","t0","console","error","alert","message","history","back","stop","css","_templateObject3","LitElement"],"mappings":"snSAmBCA,YAAc,0CACTC,smBACHC,kEACAA,mEACAA,4EAED,WACE,OAAKC,KAAKC,OAMHC,YAAPC,IAC0BH,KAAKC,OAAOG,KACpBJ,KAAKC,OAAOI,aAPrBH,YAAPI,0CAYJ,SAAkBC,GAGhB,GAFAC,EAAAC,EApBEX,EAoBFY,WAAA,eAAAV,MAAAW,KAAAX,KAAmBO,GAEdA,EAAaK,IAAI,SAAtB,CAIA,IAAMC,EAAQb,KAAKc,MAAMC,KAAKC,OAAO,GAE/BC,EAAWV,EAAaW,IAAI,SAC5BC,EAAWF,EAAWA,EAASF,KAAKC,OAAO,QAAKI,EAElDP,GAASA,IAAUM,GACrBnB,KAAKqB,WAAWR,0FAIpB,SAAAS,EAAyBC,GAAzB,IAAAC,EAAAC,EAAAZ,EAAA,OAAAa,mBAAAC,KAAA,SAAAC,GAAA,cAAAA,EAAAC,KAAAD,EAAAE,MAAA,cAAAF,EAAAC,KAAA,EAAAD,EAAAE,KAAA,EAE0BC,QAAQC,IAAI,CAChCC,YAAqBjC,KAAKkC,KAAMX,GAAhC,MAAiD,WAC/C,MAAM,IAAIY,MAAM,iCAElBC,YAAoBpC,KAAKkC,MAAzB,MAAqC,WACnC,MAAM,IAAIC,MAAM,2CAPxB,UAAAX,EAAAI,EAAAS,KAAAZ,EAAAa,EAAAd,EAAA,IAEWX,EAFXY,EAAA,IAWec,QAXf,CAAAX,EAAAE,KAAA,cAYY,IAAIK,MAAM,wCAZtB,OAeInC,KAAKC,OAASY,EAflBe,EAAAE,KAAA,iBAAAF,EAAAC,KAAA,GAAAD,EAAAY,GAAAZ,EAAA,SAkBIa,QAAQC,MAARd,EAAAY,IACAG,MAAMf,EAAAY,GAAII,SAAW,mCACrBC,QAAQC,OApBZ,yBAAAlB,EAAAmB,SAAAzB,EAAAtB,KAAA,yRAwBA,WACE,OAAOgD,YAAPC,UA7D4BC","file":"chunk.35929da61d769e57c884.js","sourcesContent":["import {\n LitElement,\n customElement,\n property,\n TemplateResult,\n html,\n PropertyValues,\n CSSResult,\n css,\n} from \"lit-element\";\nimport { HomeAssistant, Route } from \"../../../src/types\";\nimport { createHassioSession } from \"../../../src/data/hassio/supervisor\";\nimport {\n HassioAddonDetails,\n fetchHassioAddonInfo,\n} from \"../../../src/data/hassio/addon\";\nimport \"../../../src/layouts/hass-loading-screen\";\nimport \"../../../src/layouts/hass-subpage\";\n\n@customElement(\"hassio-ingress-view\")\nclass HassioIngressView extends LitElement {\n @property() public hass!: HomeAssistant;\n @property() public route!: Route;\n @property() private _addon?: HassioAddonDetails;\n\n protected render(): TemplateResult {\n if (!this._addon) {\n return html`\n <hass-loading-screen></hass-loading-screen>\n `;\n }\n\n return html`\n <hass-subpage .header=${this._addon.name} hassio>\n <iframe src=${this._addon.ingress_url}></iframe>\n </hass-subpage>\n `;\n }\n\n protected updated(changedProps: PropertyValues) {\n super.firstUpdated(changedProps);\n\n if (!changedProps.has(\"route\")) {\n return;\n }\n\n const addon = this.route.path.substr(1);\n\n const oldRoute = changedProps.get(\"route\") as this[\"route\"] | undefined;\n const oldAddon = oldRoute ? 
oldRoute.path.substr(1) : undefined;\n\n if (addon && addon !== oldAddon) {\n this._fetchData(addon);\n }\n }\n\n private async _fetchData(addonSlug: string) {\n try {\n const [addon] = await Promise.all([\n fetchHassioAddonInfo(this.hass, addonSlug).catch(() => {\n throw new Error(\"Failed to fetch add-on info\");\n }),\n createHassioSession(this.hass).catch(() => {\n throw new Error(\"Failed to create an ingress session\");\n }),\n ]);\n\n if (!addon.ingress) {\n throw new Error(\"This add-on does not support ingress\");\n }\n\n this._addon = addon;\n } catch (err) {\n // tslint:disable-next-line\n console.error(err);\n alert(err.message || \"Unknown error starting ingress.\");\n history.back();\n }\n }\n\n static get styles(): CSSResult {\n return css`\n iframe {\n display: block;\n width: 100%;\n height: 100%;\n border: 0;\n }\n paper-icon-button {\n color: var(--text-primary-color);\n }\n `;\n }\n}\n\ndeclare global {\n interface HTMLElementTagNameMap {\n \"hassio-ingress-view\": HassioIngressView;\n }\n}\n"],"sourceRoot":""}
Some files were not shown because too many files have changed in this diff.