Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-09-13 15:09:33 +00:00)
Compare commits
795 Commits
49 .devcontainer/Dockerfile (new file)
@@ -0,0 +1,49 @@
FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.8

WORKDIR /workspaces

# Install Node/Yarn for Frontent
RUN apt-get update && apt-get install -y --no-install-recommends \
    curl \
    git \
    apt-utils \
    apt-transport-https \
    && curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
    && echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
    && apt-get update && apt-get install -y --no-install-recommends \
    nodejs \
    yarn \
    && curl -o - https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash \
    && rm -rf /var/lib/apt/lists/*
ENV NVM_DIR /root/.nvm

# Install docker
# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
RUN apt-get update && apt-get install -y --no-install-recommends \
    apt-transport-https \
    ca-certificates \
    curl \
    software-properties-common \
    gpg-agent \
    && curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
    && add-apt-repository "deb https://download.docker.com/linux/debian $(lsb_release -cs) stable" \
    && apt-get update && apt-get install -y --no-install-recommends \
    docker-ce \
    docker-ce-cli \
    containerd.io \
    && rm -rf /var/lib/apt/lists/*

# Install tools
RUN apt-get update && apt-get install -y --no-install-recommends \
    jq \
    dbus \
    network-manager \
    libpulse0 \
    && rm -rf /var/lib/apt/lists/*

# Install Python dependencies from requirements.txt if it exists
COPY requirements.txt requirements_tests.txt ./
RUN pip3 install -U setuptools pip \
    && pip3 install -r requirements.txt -r requirements_tests.txt \
    && pip3 install tox \
    && rm -f requirements.txt requirements_tests.txt
32 .devcontainer/devcontainer.json (new file)
@@ -0,0 +1,32 @@
{
  "name": "Supervisor dev",
  "context": "..",
  "dockerFile": "Dockerfile",
  "appPort": "9123:8123",
  "postCreateCommand": "pre-commit install",
  "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
  "extensions": [
    "ms-python.python",
    "ms-python.vscode-pylance",
    "visualstudioexptteam.vscodeintellicode",
    "esbenp.prettier-vscode"
  ],
  "settings": {
    "terminal.integrated.shell.linux": "/bin/bash",
    "editor.formatOnPaste": false,
    "editor.formatOnSave": true,
    "editor.formatOnType": true,
    "files.trimTrailingWhitespace": true,
    "python.pythonPath": "/usr/local/bin/python3",
    "python.linting.pylintEnabled": true,
    "python.linting.enabled": true,
    "python.formatting.provider": "black",
    "python.formatting.blackArgs": ["--target-version", "py38"],
    "python.formatting.blackPath": "/usr/local/bin/black",
    "python.linting.banditPath": "/usr/local/bin/bandit",
    "python.linting.flake8Path": "/usr/local/bin/flake8",
    "python.linting.mypyPath": "/usr/local/bin/mypy",
    "python.linting.pylintPath": "/usr/local/bin/pylint",
    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
  }
}
.dockerignore
@@ -1,13 +1,23 @@
 # General files
 .git
 .github
+.devcontainer
+.vscode
 
 # Test related files
 .tox
 
 # Temporary files
 **/__pycache__
+.pytest_cache
 
 # virtualenv
 venv/
-ENV/
+
+# Data
+home-assistant-polymer/
+script/
+tests/
+
+# Test ENV
+data/
13 .github/ISSUE_TEMPLATE.md (vendored)
@@ -1,15 +1,15 @@
 <!-- READ THIS FIRST:
 - If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
-- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
-- Do not report issues for components here, plaese refer to https://github.com/home-assistant/home-assistant/issues
+- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/core/releases
+- Do not report issues for integrations here, please refer to https://github.com/home-assistant/core/issues
 - This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
 - Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
-- If you have a problem with a Add-on, make a issue on there repository.
+- If you have a problem with an add-on, make an issue in its repository.
 -->
 
 **Home Assistant release with the issue:**
 <!--
-- Frontend -> Developer tools -> Info
+- Frontend -> Configuration -> Info
 - Or use this command: hass --version
 -->
 
@@ -20,10 +20,9 @@ Please provide details about your environment.
 
 **Supervisor logs:**
 <!--
-- Frontend -> Hass.io -> System
-- Or use this command: hassio su logs
+- Frontend -> Supervisor -> System
+- Or use this command: ha supervisor logs
 -->
 
 
 **Description of problem:**
-
14 .github/dependabot.yml (vendored, new file)
@@ -0,0 +1,14 @@
version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
27 .github/lock.yml (vendored, new file)
@@ -0,0 +1,27 @@
# Configuration for Lock Threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 1

# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: 2020-01-01

# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
exemptLabels: []

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false

# Limit to only `issues` or `pulls`
only: pulls

# Optionally, specify configuration settings just for `issues` or `pulls`
issues:
  daysUntilLock: 30
18 .github/stale.yml (vendored, new file)
@@ -0,0 +1,18 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
  - pinned
  - security
  - rfc
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
432 .github/workflows/ci.yaml (vendored, new file)
@@ -0,0 +1,432 @@
|
|||||||
|
name: CI
|
||||||
|
|
||||||
|
# yamllint disable-line rule:truthy
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- dev
|
||||||
|
- master
|
||||||
|
pull_request: ~
|
||||||
|
|
||||||
|
env:
|
||||||
|
DEFAULT_PYTHON: 3.8
|
||||||
|
PRE_COMMIT_HOME: ~/.cache/pre-commit
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
# Separate job to pre-populate the base dependency cache
|
||||||
|
# This prevent upcoming jobs to do the same individually
|
||||||
|
prepare:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: [3.8]
|
||||||
|
name: Prepare Python ${{ matrix.python-version }} dependencies
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
|
id: python
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-
|
||||||
|
- name: Create Python virtual environment
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
python -m venv venv
|
||||||
|
. venv/bin/activate
|
||||||
|
pip install -U pip setuptools
|
||||||
|
pip install -r requirements.txt -r requirements_tests.txt
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
restore-keys: |
|
||||||
|
${{ runner.os }}-pre-commit-
|
||||||
|
- name: Install pre-commit dependencies
|
||||||
|
if: steps.cache-precommit.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit install-hooks
|
||||||
|
|
||||||
|
lint-black:
|
||||||
|
name: Check black
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run black
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
black --target-version py38 --check supervisor tests setup.py
|
||||||
|
|
||||||
|
lint-dockerfile:
|
||||||
|
name: Check Dockerfile
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Register hadolint problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
|
||||||
|
- name: Check Dockerfile
|
||||||
|
uses: docker://hadolint/hadolint:v1.18.0
|
||||||
|
with:
|
||||||
|
args: hadolint Dockerfile
|
||||||
|
|
||||||
|
lint-executable-shebangs:
|
||||||
|
name: Check executables
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register check executables problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
|
||||||
|
- name: Run executables check
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files
|
||||||
|
|
||||||
|
lint-flake8:
|
||||||
|
name: Check flake8
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register flake8 problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/flake8.json"
|
||||||
|
- name: Run flake8
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
flake8 supervisor tests
|
||||||
|
|
||||||
|
lint-isort:
|
||||||
|
name: Check isort
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run isort
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure
|
||||||
|
|
||||||
|
lint-json:
|
||||||
|
name: Check JSON
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register check-json problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/check-json.json"
|
||||||
|
- name: Run check-json
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual check-json --all-files
|
||||||
|
|
||||||
|
lint-pylint:
|
||||||
|
name: Check pylint
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Register pylint problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/pylint.json"
|
||||||
|
- name: Run pylint
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pylint supervisor tests
|
||||||
|
|
||||||
|
lint-pyupgrade:
|
||||||
|
name: Check pyupgrade
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Restore pre-commit environment from cache
|
||||||
|
id: cache-precommit
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: ${{ env.PRE_COMMIT_HOME }}
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
|
||||||
|
- name: Fail job if cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Run pyupgrade
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure
|
||||||
|
|
||||||
|
pytest:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: prepare
|
||||||
|
strategy:
|
||||||
|
matrix:
|
||||||
|
python-version: [3.8]
|
||||||
|
name: Run tests Python ${{ matrix.python-version }}
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ matrix.python-version }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ matrix.python-version }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Install additional system dependencies
|
||||||
|
run: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y --no-install-recommends libpulse0 libudev1
|
||||||
|
- name: Register Python problem matcher
|
||||||
|
run: |
|
||||||
|
echo "::add-matcher::.github/workflows/matchers/python.json"
|
||||||
|
- name: Install Pytest Annotation plugin
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
# Ideally this should be part of our dependencies
|
||||||
|
# However this plugin is fairly new and doesn't run correctly
|
||||||
|
# on a non-GitHub environment.
|
||||||
|
pip install pytest-github-actions-annotate-failures
|
||||||
|
- name: Run pytest
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
pytest \
|
||||||
|
-qq \
|
||||||
|
--timeout=10 \
|
||||||
|
--durations=10 \
|
||||||
|
--cov supervisor \
|
||||||
|
-o console_output_style=count \
|
||||||
|
tests
|
||||||
|
- name: Upload coverage artifact
|
||||||
|
uses: actions/upload-artifact@v2.1.4
|
||||||
|
with:
|
||||||
|
name: coverage-${{ matrix.python-version }}
|
||||||
|
path: .coverage
|
||||||
|
|
||||||
|
coverage:
|
||||||
|
name: Process test coverage
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
needs: pytest
|
||||||
|
steps:
|
||||||
|
- name: Check out code from GitHub
|
||||||
|
uses: actions/checkout@v2
|
||||||
|
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||||
|
uses: actions/setup-python@v2.1.2
|
||||||
|
id: python
|
||||||
|
with:
|
||||||
|
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||||
|
- name: Restore Python virtual environment
|
||||||
|
id: cache-venv
|
||||||
|
uses: actions/cache@v2
|
||||||
|
with:
|
||||||
|
path: venv
|
||||||
|
key: |
|
||||||
|
${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
|
||||||
|
- name: Fail job if Python cache restore failed
|
||||||
|
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||||
|
run: |
|
||||||
|
echo "Failed to restore Python virtual environment from cache"
|
||||||
|
exit 1
|
||||||
|
- name: Download all coverage artifacts
|
||||||
|
uses: actions/download-artifact@v2
|
||||||
|
- name: Combine coverage results
|
||||||
|
run: |
|
||||||
|
. venv/bin/activate
|
||||||
|
coverage combine coverage*/.coverage*
|
||||||
|
coverage report
|
||||||
|
coverage xml
|
||||||
|
- name: Upload coverage to Codecov
|
||||||
|
uses: codecov/codecov-action@v1.0.13
|
14 .github/workflows/matchers/check-executables-have-shebangs.json (vendored, new file)
@@ -0,0 +1,14 @@
{
  "problemMatcher": [
    {
      "owner": "check-executables-have-shebangs",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+)$",
          "file": 1,
          "message": 2
        }
      ]
    }
  ]
}
16 .github/workflows/matchers/check-json.json (vendored, new file)
@@ -0,0 +1,16 @@
{
  "problemMatcher": [
    {
      "owner": "check-json",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
          "file": 1,
          "message": 2,
          "line": 3,
          "column": 4
        }
      ]
    }
  ]
}
30 .github/workflows/matchers/flake8.json (vendored, new file)
@@ -0,0 +1,30 @@
{
  "problemMatcher": [
    {
      "owner": "flake8-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    },
    {
      "owner": "flake8-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    }
  ]
}
16 .github/workflows/matchers/hadolint.json (vendored, new file)
@@ -0,0 +1,16 @@
{
  "problemMatcher": [
    {
      "owner": "hadolint",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
          "file": 1,
          "line": 2,
          "message": 3,
          "code": 4
        }
      ]
    }
  ]
}
32 .github/workflows/matchers/pylint.json (vendored, new file)
@@ -0,0 +1,32 @@
{
  "problemMatcher": [
    {
      "owner": "pylint-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    },
    {
      "owner": "pylint-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    }
  ]
}
18 .github/workflows/matchers/python.json (vendored, new file)
@@ -0,0 +1,18 @@
{
  "problemMatcher": [
    {
      "owner": "python",
      "pattern": [
        {
          "regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
          "file": 1,
          "line": 2
        },
        {
          "regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
          "message": 2
        }
      ]
    }
  ]
}
15 .github/workflows/release-drafter.yml (vendored, new file)
@@ -0,0 +1,15 @@
name: Release Drafter

on:
  push:
    # branches to consider in the event; optional, defaults to all
    branches:
      - dev

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    steps:
      - uses: release-drafter/release-drafter@v5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
21 .github/workflows/sentry.yaml (vendored, new file)
@@ -0,0 +1,21 @@
name: Sentry Release

# yamllint disable-line rule:truthy
on:
  release:
    types: [published, prereleased]

jobs:
  createSentryRelease:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Sentry Release
        uses: getsentry/action-release@v1.0.1
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
          SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
        with:
          environment: production
9 .gitignore (vendored)
@@ -92,4 +92,11 @@ ENV/
 .pylint.d/
 
 # VS Code
-.vscode/
+.vscode/*
+!.vscode/cSpell.json
+!.vscode/tasks.json
+!.vscode/launch.json
+
+# mypy
+/.mypy_cache/*
+/.dmypy.json
5 .hadolint.yaml (new file)
@@ -0,0 +1,5 @@
ignored:
  - DL3018
  - DL3006
  - DL3013
  - SC2155
34 .pre-commit-config.yaml (new file)
@@ -0,0 +1,34 @@
repos:
  - repo: https://github.com/psf/black
    rev: 20.8b1
    hooks:
      - id: black
        args:
          - --safe
          - --quiet
          - --target-version
          - py38
        files: ^((supervisor|tests)/.+)?[^/]+\.py$
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.8.3
    hooks:
      - id: flake8
        additional_dependencies:
          - flake8-docstrings==1.5.0
          - pydocstyle==5.0.2
        files: ^(supervisor|script|tests)/.+\.py$
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.1.0
    hooks:
      - id: check-executables-have-shebangs
        stages: [manual]
      - id: check-json
  - repo: https://github.com/pre-commit/mirrors-isort
    rev: v4.3.21
    hooks:
      - id: isort
  - repo: https://github.com/asottile/pyupgrade
    rev: v2.6.2
    hooks:
      - id: pyupgrade
        args: [--py37-plus]
18 .vscode/launch.json (vendored, new file)
@@ -0,0 +1,18 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Supervisor remote debug",
      "type": "python",
      "request": "attach",
      "port": 33333,
      "host": "172.30.32.2",
      "pathMappings": [
        {
          "localRoot": "${workspaceFolder}",
          "remoteRoot": "/usr/src/supervisor"
        }
      ]
    }
  ]
}
90 .vscode/tasks.json (vendored, new file)
@@ -0,0 +1,90 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Run Testenv",
      "type": "shell",
      "command": "./scripts/test_env.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Run Testenv CLI",
      "type": "shell",
      "command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Update UI",
      "type": "shell",
      "command": "./scripts/update-frontend.sh",
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pytest",
      "type": "shell",
      "command": "pytest --timeout=10 tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Flake8",
      "type": "shell",
      "command": "flake8 supervisor tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pylint",
      "type": "shell",
      "command": "pylint supervisor",
      "dependsOn": ["Install all Requirements"],
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    }
  ]
}
57 Dockerfile
@@ -1,33 +1,40 @@
 ARG BUILD_FROM
 FROM $BUILD_FROM
 
+ENV \
+    S6_SERVICES_GRACETIME=10000 \
+    SUPERVISOR_API=http://localhost
+
 # Install base
-RUN apk add --no-cache \
-    openssl \
-    libffi \
-    musl \
-    git \
-    socat \
-    glib \
-    libstdc++ \
-    eudev-libs
+RUN \
+    apk add --no-cache \
+    eudev \
+    eudev-libs \
+    git \
+    glib \
+    libffi \
+    libpulse \
+    musl \
+    openssl
+
+ARG BUILD_ARCH
+WORKDIR /usr/src
 
 # Install requirements
-COPY requirements.txt /usr/src/
-RUN apk add --no-cache --virtual .build-dependencies \
-    make \
-    g++ \
-    openssl-dev \
-    libffi-dev \
-    musl-dev \
-    && export MAKEFLAGS="-j$(nproc)" \
-    && pip3 install --no-cache-dir -r /usr/src/requirements.txt \
-    && apk del .build-dependencies \
-    && rm -f /usr/src/requirements.txt
+COPY requirements.txt .
+RUN \
+    export MAKEFLAGS="-j$(nproc)" \
+    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
+    "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
+    -r ./requirements.txt \
+    && rm -f requirements.txt
 
-# Install HassIO
-COPY . /usr/src/hassio
-RUN pip3 install --no-cache-dir /usr/src/hassio \
-    && rm -rf /usr/src/hassio
+# Install Home Assistant Supervisor
+COPY . supervisor
+RUN \
+    pip3 install --no-cache-dir -e ./supervisor \
+    && python3 -m compileall ./supervisor/supervisor
 
-CMD [ "python3", "-m", "hassio" ]
+WORKDIR /
+COPY rootfs /
+
4
LICENSE
4
LICENSE
@@ -178,7 +178,7 @@
|
|||||||
APPENDIX: How to apply the Apache License to your work.
|
APPENDIX: How to apply the Apache License to your work.
|
||||||
|
|
||||||
To apply the Apache License to your work, attach the following
|
To apply the Apache License to your work, attach the following
|
||||||
boilerplate notice, with the fields enclosed by brackets "{}"
|
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||||
replaced with your own identifying information. (Don't include
|
replaced with your own identifying information. (Don't include
|
||||||
the brackets!) The text should be enclosed in the appropriate
|
the brackets!) The text should be enclosed in the appropriate
|
||||||
comment syntax for the file format. We also recommend that a
|
comment syntax for the file format. We also recommend that a
|
||||||
@@ -186,7 +186,7 @@
|
|||||||
same "printed page" as the copyright notice for easier
|
same "printed page" as the copyright notice for easier
|
||||||
identification within third-party archives.
|
identification within third-party archives.
|
||||||
|
|
||||||
Copyright 2017 Pascal Vizeli
|
Copyright [yyyy] [name of copyright owner]
|
||||||
|
|
||||||
Licensed under the Apache License, Version 2.0 (the "License");
|
Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
you may not use this file except in compliance with the License.
|
you may not use this file except in compliance with the License.
|
||||||
|
@@ -1,3 +1,3 @@
|
|||||||
include LICENSE.md
|
include LICENSE.md
|
||||||
graft hassio
|
graft supervisor
|
||||||
recursive-exclude * *.py[co]
|
recursive-exclude * *.py[co]
|
||||||
|
28
README.md
28
README.md
@@ -1,28 +1,26 @@
|
|||||||
# Hass.io
|
# Home Assistant Supervisor
|
||||||
|
|
||||||
## First private cloud solution for home automation
|
## First private cloud solution for home automation
|
||||||
|
|
||||||
Hass.io is a Docker-based system for managing your Home Assistant installation
|
Home Assistant (former Hass.io) is a container-based system for managing your
|
||||||
and related applications. The system is controlled via Home Assistant which
|
Home Assistant Core installation and related applications. The system is
|
||||||
communicates with the Supervisor. The Supervisor provides an API to manage the
|
controlled via Home Assistant which communicates with the Supervisor. The
|
||||||
installation. This includes changing network settings or installing
|
Supervisor provides an API to manage the installation. This includes changing
|
||||||
and updating software.
|
network settings or installing and updating software.
|
||||||
|
|
||||||

|
|
||||||
|
|
||||||
## Installation
|
## Installation
|
||||||
|
|
||||||
Installation instructions can be found at <https://home-assistant.io/hassio>.
|
Installation instructions can be found at https://home-assistant.io/hassio.
|
||||||
|
|
||||||
## Development
|
## Development
|
||||||
|
|
||||||
The development of the supervisor is a bit tricky. Not difficult but tricky.
|
The development of the Supervisor is not difficult but tricky.
|
||||||
|
|
||||||
- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-build/tree/master/builder
|
- You can use the builder to create your Supervisor: https://github.com/home-assistant/hassio-builder
|
||||||
- Go into a HassOS device or VM and pull your supervisor.
|
- Access a HassOS device or VM and pull your Supervisor.
|
||||||
- Set the developer modus on updater.json
|
- Set the developer modus with the CLI tool: `ha supervisor options --channel=dev`
|
||||||
- Tag it as `homeassistant/xy-hassio-supervisor:latest`
|
- Tag it as `homeassistant/xy-hassio-supervisor:latest`
|
||||||
- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
|
- Restart the service with `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
|
||||||
- Test your changes
|
- Test your changes
|
||||||
|
|
||||||
Small Bugfix or improvements, make a PR. Significant change makes first an RFC.
|
For small bugfixes or improvements, make a PR. For significant changes open a RFC first, please. Thanks.
|
||||||
|
52
azure-pipelines-ci.yml
Normal file
52
azure-pipelines-ci.yml
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
# https://dev.azure.com/home-assistant
|
||||||
|
|
||||||
|
trigger:
|
||||||
|
batch: true
|
||||||
|
branches:
|
||||||
|
include:
|
||||||
|
- master
|
||||||
|
- dev
|
||||||
|
pr:
|
||||||
|
- dev
|
||||||
|
variables:
|
||||||
|
- name: versionHadolint
|
||||||
|
value: "v1.16.3"
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
- job: "Tox"
|
||||||
|
pool:
|
||||||
|
vmImage: "ubuntu-latest"
|
||||||
|
steps:
|
||||||
|
- script: |
|
||||||
|
sudo apt-get update
|
||||||
|
sudo apt-get install -y libpulse0 libudev1
|
||||||
|
displayName: "Install Host library"
|
||||||
|
- task: UsePythonVersion@0
|
||||||
|
displayName: "Use Python 3.8"
|
||||||
|
inputs:
|
||||||
|
versionSpec: "3.8"
|
||||||
|
- script: pip install tox
|
||||||
|
displayName: "Install Tox"
|
||||||
|
- script: tox
|
||||||
|
displayName: "Run Tox"
|
||||||
|
- job: "JQ"
|
||||||
|
pool:
|
||||||
|
vmImage: "ubuntu-latest"
|
||||||
|
steps:
|
||||||
|
- script: sudo apt-get install -y jq
|
||||||
|
displayName: "Install JQ"
|
||||||
|
- bash: |
|
||||||
|
shopt -s globstar
|
||||||
|
cat **/*.json | jq '.'
|
||||||
|
displayName: "Run JQ"
|
||||||
|
- job: "Hadolint"
|
||||||
|
pool:
|
||||||
|
vmImage: "ubuntu-latest"
|
||||||
|
steps:
|
||||||
|
- script: sudo docker pull hadolint/hadolint:$(versionHadolint)
|
||||||
|
displayName: "Install Hadolint"
|
||||||
|
- script: |
|
||||||
|
sudo docker run --rm -i \
|
||||||
|
-v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
|
||||||
|
hadolint/hadolint:$(versionHadolint) < Dockerfile
|
||||||
|
displayName: "Run Hadolint"
|
53
azure-pipelines-release.yml
Normal file
53
azure-pipelines-release.yml
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
# https://dev.azure.com/home-assistant
|
||||||
|
|
||||||
|
trigger:
|
||||||
|
batch: true
|
||||||
|
branches:
|
||||||
|
include:
|
||||||
|
- dev
|
||||||
|
tags:
|
||||||
|
include:
|
||||||
|
- "*"
|
||||||
|
pr: none
|
||||||
|
variables:
|
||||||
|
- name: versionBuilder
|
||||||
|
value: "7.0"
|
||||||
|
- group: docker
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
- job: "VersionValidate"
|
||||||
|
pool:
|
||||||
|
vmImage: "ubuntu-latest"
|
||||||
|
steps:
|
||||||
|
- task: UsePythonVersion@0
|
||||||
|
displayName: "Use Python 3.8"
|
||||||
|
inputs:
|
||||||
|
versionSpec: "3.8"
|
||||||
|
- script: |
|
||||||
|
setup_version="$(python setup.py -V)"
|
||||||
|
branch_version="$(Build.SourceBranchName)"
|
||||||
|
|
||||||
|
if [ "${branch_version}" == "dev" ]; then
|
||||||
|
exit 0
|
||||||
|
elif [ "${setup_version}" != "${branch_version}" ]; then
|
||||||
|
echo "Version of tag ${branch_version} don't match with ${setup_version}!"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
displayName: "Check version of branch/tag"
|
||||||
|
- job: "Release"
|
||||||
|
dependsOn:
|
||||||
|
- "VersionValidate"
|
||||||
|
pool:
|
||||||
|
vmImage: "ubuntu-latest"
|
||||||
|
steps:
|
||||||
|
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
|
||||||
|
displayName: "Docker hub login"
|
||||||
|
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
|
||||||
|
displayName: "Install Builder"
|
||||||
|
- script: |
|
||||||
|
sudo docker run --rm --privileged \
|
||||||
|
-v ~/.docker:/root/.docker \
|
||||||
|
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
|
||||||
|
homeassistant/amd64-builder:$(versionBuilder) \
|
||||||
|
--generic $(Build.SourceBranchName) --all -t /data
|
||||||
|
displayName: "Build Release"
|
27
azure-pipelines-wheels.yml
Normal file
27
azure-pipelines-wheels.yml
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
# https://dev.azure.com/home-assistant
|
||||||
|
|
||||||
|
trigger:
|
||||||
|
batch: true
|
||||||
|
branches:
|
||||||
|
include:
|
||||||
|
- dev
|
||||||
|
pr: none
|
||||||
|
variables:
|
||||||
|
- name: versionWheels
|
||||||
|
value: '1.13.0-3.8-alpine3.12'
|
||||||
|
resources:
|
||||||
|
repositories:
|
||||||
|
- repository: azure
|
||||||
|
type: github
|
||||||
|
name: 'home-assistant/ci-azure'
|
||||||
|
endpoint: 'home-assistant'
|
||||||
|
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
- template: templates/azp-job-wheels.yaml@azure
|
||||||
|
parameters:
|
||||||
|
builderVersion: '$(versionWheels)'
|
||||||
|
builderApk: 'build-base;libffi-dev;openssl-dev'
|
||||||
|
builderPip: 'Cython'
|
||||||
|
skipBinary: 'aiohttp'
|
||||||
|
wheelsRequirement: 'requirements.txt'
|
@@ -1,45 +0,0 @@
|
|||||||
# Python package
|
|
||||||
# Create and test a Python package on multiple Python versions.
|
|
||||||
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
|
|
||||||
# https://docs.microsoft.com/azure/devops/pipelines/languages/python
|
|
||||||
|
|
||||||
trigger:
|
|
||||||
- master
|
|
||||||
- dev
|
|
||||||
|
|
||||||
pr:
|
|
||||||
- dev
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
|
|
||||||
- job: "Tox"
|
|
||||||
|
|
||||||
pool:
|
|
||||||
vmImage: 'ubuntu-16.04'
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- task: UsePythonVersion@0
|
|
||||||
displayName: 'Use Python $(python.version)'
|
|
||||||
inputs:
|
|
||||||
versionSpec: '3.7'
|
|
||||||
|
|
||||||
- script: pip install tox
|
|
||||||
displayName: 'Install Tox'
|
|
||||||
|
|
||||||
- script: tox
|
|
||||||
displayName: 'Run Tox'
|
|
||||||
|
|
||||||
|
|
||||||
- job: "JQ"
|
|
||||||
|
|
||||||
pool:
|
|
||||||
vmImage: 'ubuntu-16.04'
|
|
||||||
|
|
||||||
steps:
|
|
||||||
- script: sudo apt-get install -y jq
|
|
||||||
displayName: 'Install JQ'
|
|
||||||
|
|
||||||
- bash: |
|
|
||||||
shopt -s globstar
|
|
||||||
cat **/*.json | jq '.'
|
|
||||||
displayName: 'Run JQ'
|
|
13
build.json
Normal file
13
build.json
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
{
|
||||||
|
"image": "homeassistant/{arch}-hassio-supervisor",
|
||||||
|
"build_from": {
|
||||||
|
"aarch64": "homeassistant/aarch64-base-python:3.8-alpine3.12",
|
||||||
|
"armhf": "homeassistant/armhf-base-python:3.8-alpine3.12",
|
||||||
|
"armv7": "homeassistant/armv7-base-python:3.8-alpine3.12",
|
||||||
|
"amd64": "homeassistant/amd64-base-python:3.8-alpine3.12",
|
||||||
|
"i386": "homeassistant/i386-base-python:3.8-alpine3.12"
|
||||||
|
},
|
||||||
|
"labels": {
|
||||||
|
"io.hass.type": "supervisor"
|
||||||
|
}
|
||||||
|
}
|
11
codecov.yaml
Normal file
11
codecov.yaml
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
codecov:
|
||||||
|
branch: dev
|
||||||
|
coverage:
|
||||||
|
status:
|
||||||
|
project:
|
||||||
|
default:
|
||||||
|
target: 40
|
||||||
|
threshold: 0.09
|
||||||
|
comment: false
|
||||||
|
github_checks:
|
||||||
|
annotations: false
|
@@ -1 +0,0 @@
|
|||||||
"""Init file for Hass.io."""
|
|
@@ -1,158 +0,0 @@
|
|||||||
"""Init file for Hass.io add-ons."""
|
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from .addon import Addon
|
|
||||||
from .repository import Repository
|
|
||||||
from .data import AddonsData
|
|
||||||
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
|
|
||||||
|
|
||||||
|
|
||||||
class AddonManager(CoreSysAttributes):
|
|
||||||
"""Manage add-ons inside Hass.io."""
|
|
||||||
|
|
||||||
def __init__(self, coresys):
|
|
||||||
"""Initialize Docker base wrapper."""
|
|
||||||
self.coresys = coresys
|
|
||||||
self.data = AddonsData(coresys)
|
|
||||||
self.addons_obj = {}
|
|
||||||
self.repositories_obj = {}
|
|
||||||
|
|
||||||
@property
|
|
||||||
def list_addons(self):
|
|
||||||
"""Return a list of all add-ons."""
|
|
||||||
return list(self.addons_obj.values())
|
|
||||||
|
|
||||||
@property
|
|
||||||
def list_installed(self):
|
|
||||||
"""Return a list of installed add-ons."""
|
|
||||||
return [addon for addon in self.addons_obj.values()
|
|
||||||
if addon.is_installed]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def list_repositories(self):
|
|
||||||
"""Return list of add-on repositories."""
|
|
||||||
return list(self.repositories_obj.values())
|
|
||||||
|
|
||||||
def get(self, addon_slug):
|
|
||||||
"""Return an add-on from slug."""
|
|
||||||
return self.addons_obj.get(addon_slug)
|
|
||||||
|
|
||||||
def from_token(self, token):
|
|
||||||
"""Return an add-on from Hass.io token."""
|
|
||||||
for addon in self.list_addons:
|
|
||||||
if addon.is_installed and token == addon.hassio_token:
|
|
||||||
return addon
|
|
||||||
return None
|
|
||||||
|
|
||||||
async def load(self):
|
|
||||||
"""Start up add-on management."""
|
|
||||||
self.data.reload()
|
|
||||||
|
|
||||||
# Init Hass.io built-in repositories
|
|
||||||
repositories = \
|
|
||||||
set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES
|
|
||||||
|
|
||||||
# Init custom repositories and load add-ons
|
|
||||||
await self.load_repositories(repositories)
|
|
||||||
|
|
||||||
async def reload(self):
|
|
||||||
"""Update add-ons from repository and reload list."""
|
|
||||||
tasks = [repository.update() for repository in
|
|
||||||
self.repositories_obj.values()]
|
|
||||||
if tasks:
|
|
||||||
await asyncio.wait(tasks)
|
|
||||||
|
|
||||||
# read data from repositories
|
|
||||||
self.data.reload()
|
|
||||||
|
|
||||||
# update addons
|
|
||||||
await self.load_addons()
|
|
||||||
|
|
||||||
async def load_repositories(self, list_repositories):
|
|
||||||
"""Add a new custom repository."""
|
|
||||||
new_rep = set(list_repositories)
|
|
||||||
old_rep = set(self.repositories_obj)
|
|
||||||
|
|
||||||
# add new repository
|
|
||||||
async def _add_repository(url):
|
|
||||||
"""Helper function to async add repository."""
|
|
||||||
repository = Repository(self.coresys, url)
|
|
||||||
if not await repository.load():
|
|
||||||
_LOGGER.error("Can't load from repository %s", url)
|
|
||||||
return
|
|
||||||
self.repositories_obj[url] = repository
|
|
||||||
|
|
||||||
# don't add built-in repository to config
|
|
||||||
if url not in BUILTIN_REPOSITORIES:
|
|
||||||
self.sys_config.add_addon_repository(url)
|
|
||||||
|
|
||||||
tasks = [_add_repository(url) for url in new_rep - old_rep]
|
|
||||||
if tasks:
|
|
||||||
await asyncio.wait(tasks)
|
|
||||||
|
|
||||||
# del new repository
|
|
||||||
for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
|
|
||||||
self.repositories_obj.pop(url).remove()
|
|
||||||
self.sys_config.drop_addon_repository(url)
|
|
||||||
|
|
||||||
# update data
|
|
||||||
self.data.reload()
|
|
||||||
await self.load_addons()
|
|
||||||
|
|
||||||
async def load_addons(self):
|
|
||||||
"""Update/add internal add-on store."""
|
|
||||||
all_addons = set(self.data.system) | set(self.data.cache)
|
|
||||||
|
|
||||||
# calc diff
|
|
||||||
add_addons = all_addons - set(self.addons_obj)
|
|
||||||
del_addons = set(self.addons_obj) - all_addons
|
|
||||||
|
|
||||||
_LOGGER.info("Load add-ons: %d all - %d new - %d remove",
|
|
||||||
len(all_addons), len(add_addons), len(del_addons))
|
|
||||||
|
|
||||||
# new addons
|
|
||||||
tasks = []
|
|
||||||
for addon_slug in add_addons:
|
|
||||||
addon = Addon(self.coresys, addon_slug)
|
|
||||||
|
|
||||||
tasks.append(addon.load())
|
|
||||||
self.addons_obj[addon_slug] = addon
|
|
||||||
|
|
||||||
if tasks:
|
|
||||||
await asyncio.wait(tasks)
|
|
||||||
|
|
||||||
# remove
|
|
||||||
for addon_slug in del_addons:
|
|
||||||
self.addons_obj.pop(addon_slug)
|
|
||||||
|
|
||||||
async def boot(self, stage):
|
|
||||||
"""Boot add-ons with mode auto."""
|
|
||||||
tasks = []
|
|
||||||
for addon in self.addons_obj.values():
|
|
||||||
if addon.is_installed and addon.boot == BOOT_AUTO and \
|
|
||||||
addon.startup == stage:
|
|
||||||
tasks.append(addon.start())
|
|
||||||
|
|
||||||
_LOGGER.info("Startup %s run %d add-ons", stage, len(tasks))
|
|
||||||
if tasks:
|
|
||||||
await asyncio.wait(tasks)
|
|
||||||
await asyncio.sleep(self.sys_config.wait_boot)
|
|
||||||
|
|
||||||
async def shutdown(self, stage):
|
|
||||||
"""Shutdown addons."""
|
|
||||||
tasks = []
|
|
||||||
for addon in self.addons_obj.values():
|
|
||||||
if addon.is_installed and \
|
|
||||||
await addon.state() == STATE_STARTED and \
|
|
||||||
addon.startup == stage:
|
|
||||||
tasks.append(addon.stop())
|
|
||||||
|
|
||||||
_LOGGER.info("Shutdown %s stop %d add-ons", stage, len(tasks))
|
|
||||||
if tasks:
|
|
||||||
await asyncio.wait(tasks)
|
|
File diff suppressed because it is too large
Load Diff
@@ -1,374 +0,0 @@
|
|||||||
"""Validate add-ons options schema."""
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
import secrets
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
import voluptuous as vol
|
|
||||||
|
|
||||||
from ..const import (
|
|
||||||
ARCH_ALL,
|
|
||||||
ATTR_ACCESS_TOKEN,
|
|
||||||
ATTR_APPARMOR,
|
|
||||||
ATTR_ARCH,
|
|
||||||
ATTR_ARGS,
|
|
||||||
ATTR_AUDIO,
|
|
||||||
ATTR_AUDIO_INPUT,
|
|
||||||
ATTR_AUDIO_OUTPUT,
|
|
||||||
ATTR_AUTH_API,
|
|
||||||
ATTR_AUTO_UART,
|
|
||||||
ATTR_AUTO_UPDATE,
|
|
||||||
ATTR_BOOT,
|
|
||||||
ATTR_BUILD_FROM,
|
|
||||||
ATTR_DESCRIPTON,
|
|
||||||
ATTR_DEVICES,
|
|
||||||
ATTR_DEVICETREE,
|
|
||||||
ATTR_DISCOVERY,
|
|
||||||
ATTR_DOCKER_API,
|
|
||||||
ATTR_ENVIRONMENT,
|
|
||||||
ATTR_FULL_ACCESS,
|
|
||||||
ATTR_GPIO,
|
|
||||||
ATTR_HASSIO_API,
|
|
||||||
ATTR_HASSIO_ROLE,
|
|
||||||
ATTR_HOMEASSISTANT_API,
|
|
||||||
ATTR_HOMEASSISTANT,
|
|
||||||
ATTR_HOST_DBUS,
|
|
||||||
ATTR_HOST_IPC,
|
|
||||||
ATTR_HOST_NETWORK,
|
|
||||||
ATTR_HOST_PID,
|
|
||||||
ATTR_IMAGE,
|
|
||||||
ATTR_INGRESS,
|
|
||||||
ATTR_INGRESS_ENTRY,
|
|
||||||
ATTR_INGRESS_PORT,
|
|
||||||
ATTR_INGRESS_TOKEN,
|
|
||||||
ATTR_KERNEL_MODULES,
|
|
||||||
ATTR_LEGACY,
|
|
||||||
ATTR_LOCATON,
|
|
||||||
ATTR_MACHINE,
|
|
||||||
ATTR_MAINTAINER,
|
|
||||||
ATTR_MAP,
|
|
||||||
ATTR_NAME,
|
|
||||||
ATTR_NETWORK,
|
|
||||||
ATTR_OPTIONS,
|
|
||||||
ATTR_PORTS,
|
|
||||||
ATTR_PRIVILEGED,
|
|
||||||
ATTR_PROTECTED,
|
|
||||||
ATTR_REPOSITORY,
|
|
||||||
ATTR_SCHEMA,
|
|
||||||
ATTR_SERVICES,
|
|
||||||
ATTR_SLUG,
|
|
||||||
ATTR_SQUASH,
|
|
||||||
ATTR_STARTUP,
|
|
||||||
ATTR_STATE,
|
|
||||||
ATTR_STDIN,
|
|
||||||
ATTR_SYSTEM,
|
|
||||||
ATTR_TIMEOUT,
|
|
||||||
ATTR_TMPFS,
|
|
||||||
ATTR_URL,
|
|
||||||
ATTR_USER,
|
|
||||||
ATTR_UUID,
|
|
||||||
ATTR_VERSION,
|
|
||||||
ATTR_WEBUI,
|
|
||||||
BOOT_AUTO,
|
|
||||||
BOOT_MANUAL,
|
|
||||||
PRIVILEGED_ALL,
|
|
||||||
ROLE_ALL,
|
|
||||||
ROLE_DEFAULT,
|
|
||||||
STARTUP_ALL,
|
|
||||||
STARTUP_APPLICATION,
|
|
||||||
STARTUP_SERVICES,
|
|
||||||
STATE_STARTED,
|
|
||||||
STATE_STOPPED,
|
|
||||||
)
|
|
||||||
from ..discovery.validate import valid_discovery_service
|
|
||||||
from ..validate import ALSA_DEVICE, DOCKER_PORTS, NETWORK_PORT, TOKEN, UUID_MATCH
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
|
|
||||||
RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")
|
|
||||||
|
|
||||||
V_STR = 'str'
|
|
||||||
V_INT = 'int'
|
|
||||||
V_FLOAT = 'float'
|
|
||||||
V_BOOL = 'bool'
|
|
||||||
V_EMAIL = 'email'
|
|
||||||
V_URL = 'url'
|
|
||||||
V_PORT = 'port'
|
|
||||||
V_MATCH = 'match'
|
|
||||||
|
|
||||||
RE_SCHEMA_ELEMENT = re.compile(
|
|
||||||
r"^(?:"
|
|
||||||
r"|str|bool|email|url|port"
|
|
||||||
r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
|
|
||||||
r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
|
|
||||||
r"|match\((?P<match>.*)\)"
|
|
||||||
r")\??$"
|
|
||||||
)
|
|
||||||
|
|
||||||
RE_DOCKER_IMAGE = re.compile(
|
|
||||||
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
|
|
||||||
RE_DOCKER_IMAGE_BUILD = re.compile(
|
|
||||||
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$")
|
|
||||||
|
|
||||||
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
|
|
||||||
|
|
||||||
|
|
||||||
MACHINE_ALL = [
|
|
||||||
'intel-nuc', 'odroid-c2', 'odroid-xu', 'orangepi-prime', 'qemux86',
|
|
||||||
'qemux86-64', 'qemuarm', 'qemuarm-64', 'raspberrypi', 'raspberrypi2',
|
|
||||||
'raspberrypi3', 'raspberrypi3-64', 'tinker',
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def _simple_startup(value):
|
|
||||||
"""Simple startup schema."""
|
|
||||||
if value == "before":
|
|
||||||
return STARTUP_SERVICES
|
|
||||||
if value == "after":
|
|
||||||
return STARTUP_APPLICATION
|
|
||||||
return value
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_ADDON_CONFIG = vol.Schema({
|
|
||||||
vol.Required(ATTR_NAME): vol.Coerce(str),
|
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
|
||||||
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
|
||||||
vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
|
|
||||||
vol.Required(ATTR_ARCH): [vol.In(ARCH_ALL)],
|
|
||||||
vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
|
|
||||||
vol.Optional(ATTR_URL): vol.Url(),
|
|
||||||
vol.Required(ATTR_STARTUP):
|
|
||||||
vol.All(_simple_startup, vol.In(STARTUP_ALL)),
|
|
||||||
vol.Required(ATTR_BOOT):
|
|
||||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
|
||||||
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
|
|
||||||
vol.Optional(ATTR_WEBUI):
|
|
||||||
vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
|
|
||||||
vol.Optional(ATTR_INGRESS, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_INGRESS_PORT, default=8099): NETWORK_PORT,
|
|
||||||
vol.Optional(ATTR_INGRESS_ENTRY): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_HOMEASSISTANT): vol.Maybe(vol.Coerce(str)),
|
|
||||||
vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
|
|
||||||
vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_TMPFS):
|
|
||||||
vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
|
|
||||||
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
|
|
||||||
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
|
|
||||||
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
|
|
||||||
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
|
|
||||||
vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
|
|
||||||
vol.Optional(ATTR_DISCOVERY): [valid_discovery_service],
|
|
||||||
vol.Required(ATTR_OPTIONS): dict,
|
|
||||||
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
|
|
||||||
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
|
|
||||||
vol.Any(
|
|
||||||
SCHEMA_ELEMENT,
|
|
||||||
{vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
|
|
||||||
),
|
|
||||||
], vol.Schema({
|
|
||||||
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
|
|
||||||
}))
|
|
||||||
}), False),
|
|
||||||
vol.Optional(ATTR_IMAGE):
|
|
||||||
vol.Match(RE_DOCKER_IMAGE),
|
|
||||||
vol.Optional(ATTR_TIMEOUT, default=10):
|
|
||||||
vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_REPOSITORY_CONFIG = vol.Schema({
|
|
||||||
vol.Required(ATTR_NAME): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_URL): vol.Url(),
|
|
||||||
vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_BUILD_CONFIG = vol.Schema({
|
|
||||||
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema({
|
|
||||||
vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD),
|
|
||||||
}),
|
|
||||||
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
|
|
||||||
vol.Coerce(str): vol.Coerce(str)
|
|
||||||
}),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_ADDON_USER = vol.Schema({
|
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_IMAGE): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
|
|
||||||
vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
|
|
||||||
vol.Optional(ATTR_INGRESS_TOKEN, default=secrets.token_urlsafe): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_OPTIONS, default=dict): dict,
|
|
||||||
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_BOOT):
|
|
||||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
|
||||||
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
|
|
||||||
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
|
|
||||||
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
|
|
||||||
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
|
|
||||||
vol.Required(ATTR_LOCATON): vol.Coerce(str),
|
|
||||||
vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_ADDONS_FILE = vol.Schema({
|
|
||||||
vol.Optional(ATTR_USER, default=dict): {
|
|
||||||
vol.Coerce(str): SCHEMA_ADDON_USER,
|
|
||||||
},
|
|
||||||
vol.Optional(ATTR_SYSTEM, default=dict): {
|
|
||||||
vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_ADDON_SNAPSHOT = vol.Schema({
|
|
||||||
vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
|
|
||||||
vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
|
|
||||||
vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
|
|
||||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
|
||||||
}, extra=vol.REMOVE_EXTRA)
|
|
||||||
|
|
||||||
|
|
||||||
def validate_options(raw_schema):
|
|
||||||
"""Validate schema."""
|
|
||||||
def validate(struct):
|
|
||||||
"""Create schema validator for add-ons options."""
|
|
||||||
options = {}
|
|
||||||
|
|
||||||
# read options
|
|
||||||
for key, value in struct.items():
|
|
||||||
# Ignore unknown options / remove from list
|
|
||||||
if key not in raw_schema:
|
|
||||||
_LOGGER.warning("Unknown options %s", key)
|
|
||||||
continue
|
|
||||||
|
|
||||||
typ = raw_schema[key]
|
|
||||||
try:
|
|
||||||
if isinstance(typ, list):
|
|
||||||
# nested value list
|
|
||||||
options[key] = _nested_validate_list(typ[0], value, key)
|
|
||||||
elif isinstance(typ, dict):
|
|
||||||
# nested value dict
|
|
||||||
options[key] = _nested_validate_dict(typ, value, key)
|
|
||||||
else:
|
|
||||||
# normal value
|
|
||||||
options[key] = _single_validate(typ, value, key)
|
|
||||||
except (IndexError, KeyError):
|
|
||||||
raise vol.Invalid(f"Type error for {key}") from None
|
|
||||||
|
|
||||||
_check_missing_options(raw_schema, options, 'root')
|
|
||||||
return options
|
|
||||||
|
|
||||||
return validate
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
# pylint: disable=inconsistent-return-statements
|
|
||||||
def _single_validate(typ, value, key):
|
|
||||||
"""Validate a single element."""
|
|
||||||
# if required argument
|
|
||||||
if value is None:
|
|
||||||
raise vol.Invalid(f"Missing required option '{key}'")
|
|
||||||
|
|
||||||
# parse extend data from type
|
|
||||||
match = RE_SCHEMA_ELEMENT.match(typ)
|
|
||||||
|
|
||||||
# prepare range
|
|
||||||
range_args = {}
|
|
||||||
for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
|
|
||||||
group_value = match.group(group_name)
|
|
||||||
if group_value:
|
|
||||||
range_args[group_name[2:]] = float(group_value)
|
|
||||||
|
|
||||||
if typ.startswith(V_STR):
|
|
||||||
return str(value)
|
|
||||||
elif typ.startswith(V_INT):
|
|
||||||
return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
|
|
||||||
elif typ.startswith(V_FLOAT):
|
|
||||||
return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
|
|
||||||
elif typ.startswith(V_BOOL):
|
|
||||||
return vol.Boolean()(value)
|
|
||||||
elif typ.startswith(V_EMAIL):
|
|
||||||
return vol.Email()(value)
|
|
||||||
elif typ.startswith(V_URL):
|
|
||||||
return vol.Url()(value)
|
|
||||||
elif typ.startswith(V_PORT):
|
|
||||||
return NETWORK_PORT(value)
|
|
||||||
elif typ.startswith(V_MATCH):
|
|
||||||
return vol.Match(match.group('match'))(str(value))
|
|
||||||
|
|
||||||
raise vol.Invalid(f"Fatal error for {key} type {typ}")
|
|
||||||
|
|
||||||
|
|
||||||
def _nested_validate_list(typ, data_list, key):
|
|
||||||
"""Validate nested items."""
|
|
||||||
options = []
|
|
||||||
|
|
||||||
for element in data_list:
|
|
||||||
# Nested?
|
|
||||||
if isinstance(typ, dict):
|
|
||||||
c_options = _nested_validate_dict(typ, element, key)
|
|
||||||
options.append(c_options)
|
|
||||||
else:
|
|
||||||
options.append(_single_validate(typ, element, key))
|
|
||||||
|
|
||||||
return options
|
|
||||||
|
|
||||||
|
|
||||||
def _nested_validate_dict(typ, data_dict, key):
|
|
||||||
"""Validate nested items."""
|
|
||||||
options = {}
|
|
||||||
|
|
||||||
for c_key, c_value in data_dict.items():
|
|
||||||
# Ignore unknown options / remove from list
|
|
||||||
if c_key not in typ:
|
|
||||||
_LOGGER.warning("Unknown options %s", c_key)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Nested?
|
|
||||||
if isinstance(typ[c_key], list):
|
|
||||||
options[c_key] = _nested_validate_list(typ[c_key][0],
|
|
||||||
c_value, c_key)
|
|
||||||
else:
|
|
||||||
options[c_key] = _single_validate(typ[c_key], c_value, c_key)
|
|
||||||
|
|
||||||
_check_missing_options(typ, options, key)
|
|
||||||
return options
|
|
||||||
|
|
||||||
|
|
||||||
def _check_missing_options(origin, exists, root):
|
|
||||||
"""Check if all options are exists."""
|
|
||||||
missing = set(origin) - set(exists)
|
|
||||||
for miss_opt in missing:
|
|
||||||
if isinstance(origin[miss_opt], str) and \
|
|
||||||
origin[miss_opt].endswith("?"):
|
|
||||||
continue
|
|
||||||
raise vol.Invalid(f"Missing option {miss_opt} in {root}")
|
|
@@ -1,296 +0,0 @@
|
|||||||
"""Init file for Hass.io RESTful API."""
|
|
||||||
import logging
|
|
||||||
from pathlib import Path
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from aiohttp import web
|
|
||||||
|
|
||||||
from ..coresys import CoreSys, CoreSysAttributes
|
|
||||||
from .addons import APIAddons
|
|
||||||
from .auth import APIAuth
|
|
||||||
from .discovery import APIDiscovery
|
|
||||||
from .hardware import APIHardware
|
|
||||||
from .hassos import APIHassOS
|
|
||||||
from .homeassistant import APIHomeAssistant
|
|
||||||
from .host import APIHost
|
|
||||||
from .info import APIInfo
|
|
||||||
from .ingress import APIIngress
|
|
||||||
from .proxy import APIProxy
|
|
||||||
from .security import SecurityMiddleware
|
|
||||||
from .services import APIServices
|
|
||||||
from .snapshots import APISnapshots
|
|
||||||
from .supervisor import APISupervisor
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class RestAPI(CoreSysAttributes):
|
|
||||||
"""Handle RESTful API for Hass.io."""
|
|
||||||
|
|
||||||
def __init__(self, coresys: CoreSys):
|
|
||||||
"""Initialize Docker base wrapper."""
|
|
||||||
self.coresys: CoreSys = coresys
|
|
||||||
self.security: SecurityMiddleware = SecurityMiddleware(coresys)
|
|
||||||
self.webapp: web.Application = web.Application(
|
|
||||||
middlewares=[self.security.token_validation])
|
|
||||||
|
|
||||||
# service stuff
|
|
||||||
self._runner: web.AppRunner = web.AppRunner(self.webapp)
|
|
||||||
self._site: Optional[web.TCPSite] = None
|
|
||||||
|
|
||||||
async def load(self) -> None:
|
|
||||||
"""Register REST API Calls."""
|
|
||||||
self._register_supervisor()
|
|
||||||
self._register_host()
|
|
||||||
self._register_hassos()
|
|
||||||
self._register_hardware()
|
|
||||||
self._register_homeassistant()
|
|
||||||
self._register_proxy()
|
|
||||||
self._register_panel()
|
|
||||||
self._register_addons()
|
|
||||||
self._register_ingress()
|
|
||||||
self._register_snapshots()
|
|
||||||
self._register_discovery()
|
|
||||||
self._register_services()
|
|
||||||
self._register_info()
|
|
||||||
self._register_auth()
|
|
||||||
|
|
||||||
def _register_host(self) -> None:
|
|
||||||
"""Register hostcontrol functions."""
|
|
||||||
api_host = APIHost()
|
|
||||||
api_host.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/host/info', api_host.info),
|
|
||||||
web.post('/host/reboot', api_host.reboot),
|
|
||||||
web.post('/host/shutdown', api_host.shutdown),
|
|
||||||
web.post('/host/reload', api_host.reload),
|
|
||||||
web.post('/host/options', api_host.options),
|
|
||||||
web.get('/host/services', api_host.services),
|
|
||||||
web.post('/host/services/{service}/stop', api_host.service_stop),
|
|
||||||
web.post('/host/services/{service}/start', api_host.service_start),
|
|
||||||
web.post('/host/services/{service}/restart',
|
|
||||||
api_host.service_restart),
|
|
||||||
web.post('/host/services/{service}/reload',
|
|
||||||
api_host.service_reload),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_hassos(self) -> None:
|
|
||||||
"""Register HassOS functions."""
|
|
||||||
api_hassos = APIHassOS()
|
|
||||||
api_hassos.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/hassos/info', api_hassos.info),
|
|
||||||
web.post('/hassos/update', api_hassos.update),
|
|
||||||
web.post('/hassos/update/cli', api_hassos.update_cli),
|
|
||||||
web.post('/hassos/config/sync', api_hassos.config_sync),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_hardware(self) -> None:
|
|
||||||
"""Register hardware functions."""
|
|
||||||
api_hardware = APIHardware()
|
|
||||||
api_hardware.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/hardware/info', api_hardware.info),
|
|
||||||
web.get('/hardware/audio', api_hardware.audio),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_info(self) -> None:
|
|
||||||
"""Register info functions."""
|
|
||||||
api_info = APIInfo()
|
|
||||||
api_info.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/info', api_info.info),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_auth(self) -> None:
|
|
||||||
"""Register auth functions."""
|
|
||||||
api_auth = APIAuth()
|
|
||||||
api_auth.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.post('/auth', api_auth.auth),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_supervisor(self) -> None:
|
|
||||||
"""Register Supervisor functions."""
|
|
||||||
api_supervisor = APISupervisor()
|
|
||||||
api_supervisor.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/supervisor/ping', api_supervisor.ping),
|
|
||||||
web.get('/supervisor/info', api_supervisor.info),
|
|
||||||
web.get('/supervisor/stats', api_supervisor.stats),
|
|
||||||
web.get('/supervisor/logs', api_supervisor.logs),
|
|
||||||
web.post('/supervisor/update', api_supervisor.update),
|
|
||||||
web.post('/supervisor/reload', api_supervisor.reload),
|
|
||||||
web.post('/supervisor/options', api_supervisor.options),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_homeassistant(self) -> None:
|
|
||||||
"""Register Home Assistant functions."""
|
|
||||||
api_hass = APIHomeAssistant()
|
|
||||||
api_hass.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/homeassistant/info', api_hass.info),
|
|
||||||
web.get('/homeassistant/logs', api_hass.logs),
|
|
||||||
web.get('/homeassistant/stats', api_hass.stats),
|
|
||||||
web.post('/homeassistant/options', api_hass.options),
|
|
||||||
web.post('/homeassistant/update', api_hass.update),
|
|
||||||
web.post('/homeassistant/restart', api_hass.restart),
|
|
||||||
web.post('/homeassistant/stop', api_hass.stop),
|
|
||||||
web.post('/homeassistant/start', api_hass.start),
|
|
||||||
web.post('/homeassistant/check', api_hass.check),
|
|
||||||
web.post('/homeassistant/rebuild', api_hass.rebuild),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_proxy(self) -> None:
|
|
||||||
"""Register Home Assistant API Proxy."""
|
|
||||||
api_proxy = APIProxy()
|
|
||||||
api_proxy.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/homeassistant/api/websocket', api_proxy.websocket),
|
|
||||||
web.get('/homeassistant/websocket', api_proxy.websocket),
|
|
||||||
web.get('/homeassistant/api/stream', api_proxy.stream),
|
|
||||||
web.post('/homeassistant/api/{path:.+}', api_proxy.api),
|
|
||||||
web.get('/homeassistant/api/{path:.+}', api_proxy.api),
|
|
||||||
web.get('/homeassistant/api/', api_proxy.api),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_addons(self) -> None:
|
|
||||||
"""Register Add-on functions."""
|
|
||||||
api_addons = APIAddons()
|
|
||||||
api_addons.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/addons', api_addons.list),
|
|
||||||
web.post('/addons/reload', api_addons.reload),
|
|
||||||
web.get('/addons/{addon}/info', api_addons.info),
|
|
||||||
web.post('/addons/{addon}/install', api_addons.install),
|
|
||||||
web.post('/addons/{addon}/uninstall', api_addons.uninstall),
|
|
||||||
web.post('/addons/{addon}/start', api_addons.start),
|
|
||||||
web.post('/addons/{addon}/stop', api_addons.stop),
|
|
||||||
web.post('/addons/{addon}/restart', api_addons.restart),
|
|
||||||
web.post('/addons/{addon}/update', api_addons.update),
|
|
||||||
web.post('/addons/{addon}/options', api_addons.options),
|
|
||||||
web.post('/addons/{addon}/rebuild', api_addons.rebuild),
|
|
||||||
web.get('/addons/{addon}/logs', api_addons.logs),
|
|
||||||
web.get('/addons/{addon}/icon', api_addons.icon),
|
|
||||||
web.get('/addons/{addon}/logo', api_addons.logo),
|
|
||||||
web.get('/addons/{addon}/changelog', api_addons.changelog),
|
|
||||||
web.post('/addons/{addon}/stdin', api_addons.stdin),
|
|
||||||
web.post('/addons/{addon}/security', api_addons.security),
|
|
||||||
web.get('/addons/{addon}/stats', api_addons.stats),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_ingress(self) -> None:
|
|
||||||
"""Register Ingress functions."""
|
|
||||||
api_ingress = APIIngress()
|
|
||||||
api_ingress.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.post('/ingress/session', api_ingress.create_session),
|
|
||||||
web.view('/ingress/{token}/{path:.*}', api_ingress.handler),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_snapshots(self) -> None:
|
|
||||||
"""Register snapshots functions."""
|
|
||||||
api_snapshots = APISnapshots()
|
|
||||||
api_snapshots.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/snapshots', api_snapshots.list),
|
|
||||||
web.post('/snapshots/reload', api_snapshots.reload),
|
|
||||||
web.post('/snapshots/new/full', api_snapshots.snapshot_full),
|
|
||||||
web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
|
|
||||||
web.post('/snapshots/new/upload', api_snapshots.upload),
|
|
||||||
web.get('/snapshots/{snapshot}/info', api_snapshots.info),
|
|
||||||
web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
|
|
||||||
web.post('/snapshots/{snapshot}/restore/full',
|
|
||||||
api_snapshots.restore_full),
|
|
||||||
web.post('/snapshots/{snapshot}/restore/partial',
|
|
||||||
api_snapshots.restore_partial),
|
|
||||||
web.get('/snapshots/{snapshot}/download', api_snapshots.download),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_services(self) -> None:
|
|
||||||
"""Register services functions."""
|
|
||||||
api_services = APIServices()
|
|
||||||
api_services.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/services', api_services.list),
|
|
||||||
web.get('/services/{service}', api_services.get_service),
|
|
||||||
web.post('/services/{service}', api_services.set_service),
|
|
||||||
web.delete('/services/{service}', api_services.del_service),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_discovery(self) -> None:
|
|
||||||
"""Register discovery functions."""
|
|
||||||
api_discovery = APIDiscovery()
|
|
||||||
api_discovery.coresys = self.coresys
|
|
||||||
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/discovery', api_discovery.list),
|
|
||||||
web.get('/discovery/{uuid}', api_discovery.get_discovery),
|
|
||||||
web.delete('/discovery/{uuid}', api_discovery.del_discovery),
|
|
||||||
web.post('/discovery', api_discovery.set_discovery),
|
|
||||||
])
|
|
||||||
|
|
||||||
def _register_panel(self) -> None:
|
|
||||||
"""Register panel for Home Assistant."""
|
|
||||||
panel_dir = Path(__file__).parent.joinpath("panel")
|
|
||||||
|
|
||||||
def create_response(panel_file):
|
|
||||||
"""Create a function to generate a response."""
|
|
||||||
path = panel_dir.joinpath(f"{panel_file!s}.html")
|
|
||||||
return lambda request: web.FileResponse(path)
|
|
||||||
|
|
||||||
# This route is for backwards compatibility with HA < 0.58
|
|
||||||
self.webapp.add_routes(
|
|
||||||
[web.get('/panel', create_response('hassio-main-es5'))])
|
|
||||||
|
|
||||||
# This route is for backwards compatibility with HA 0.58 - 0.61
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/panel_es5', create_response('hassio-main-es5')),
|
|
||||||
web.get('/panel_latest', create_response('hassio-main-latest')),
|
|
||||||
])
|
|
||||||
|
|
||||||
# This route is for backwards compatibility with HA 0.62 - 0.70
|
|
||||||
self.webapp.add_routes([
|
|
||||||
web.get('/app-es5/index.html', create_response('index')),
|
|
||||||
web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
|
|
||||||
])
|
|
||||||
|
|
||||||
# This route is for HA > 0.70
|
|
||||||
self.webapp.add_routes([web.static('/app', panel_dir)])
|
|
||||||
|
|
||||||
async def start(self) -> None:
|
|
||||||
"""Run RESTful API webserver."""
|
|
||||||
await self._runner.setup()
|
|
||||||
self._site = web.TCPSite(
|
|
||||||
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5)
|
|
||||||
|
|
||||||
try:
|
|
||||||
await self._site.start()
|
|
||||||
except OSError as err:
|
|
||||||
_LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s",
|
|
||||||
err)
|
|
||||||
else:
|
|
||||||
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
|
|
||||||
|
|
||||||
async def stop(self) -> None:
|
|
||||||
"""Stop RESTful API webserver."""
|
|
||||||
if not self._site:
|
|
||||||
return
|
|
||||||
|
|
||||||
# Shutdown running API
|
|
||||||
await self._site.stop()
|
|
||||||
await self._runner.cleanup()
|
|
||||||
|
|
||||||
_LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor)
|
|
@@ -1,61 +0,0 @@
|
|||||||
"""Init file for Hass.io auth/SSO RESTful API."""
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from aiohttp import BasicAuth
|
|
||||||
from aiohttp.web_exceptions import HTTPUnauthorized
|
|
||||||
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION, WWW_AUTHENTICATE
|
|
||||||
|
|
||||||
from .utils import api_process
|
|
||||||
from ..const import REQUEST_FROM, CONTENT_TYPE_JSON, CONTENT_TYPE_URL
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
from ..exceptions import APIForbidden
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class APIAuth(CoreSysAttributes):
|
|
||||||
"""Handle RESTful API for auth functions."""
|
|
||||||
|
|
||||||
def _process_basic(self, request, addon):
|
|
||||||
"""Process login request with basic auth.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
auth = BasicAuth.decode(request.headers[AUTHORIZATION])
|
|
||||||
return self.sys_auth.check_login(addon, auth.login, auth.password)
|
|
||||||
|
|
||||||
def _process_dict(self, request, addon, data):
|
|
||||||
"""Process login with dict data.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
username = data.get('username') or data.get('user')
|
|
||||||
password = data.get('password')
|
|
||||||
|
|
||||||
return self.sys_auth.check_login(addon, username, password)
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def auth(self, request):
|
|
||||||
"""Process login request."""
|
|
||||||
addon = request[REQUEST_FROM]
|
|
||||||
|
|
||||||
if not addon.access_auth_api:
|
|
||||||
raise APIForbidden("Can't use Home Assistant auth!")
|
|
||||||
|
|
||||||
# BasicAuth
|
|
||||||
if AUTHORIZATION in request.headers:
|
|
||||||
return await self._process_basic(request, addon)
|
|
||||||
|
|
||||||
# Json
|
|
||||||
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
|
|
||||||
data = await request.json()
|
|
||||||
return await self._process_dict(request, addon, data)
|
|
||||||
|
|
||||||
# URL encoded
|
|
||||||
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
|
|
||||||
data = await request.post()
|
|
||||||
return await self._process_dict(request, addon, data)
|
|
||||||
|
|
||||||
raise HTTPUnauthorized(headers={
|
|
||||||
WWW_AUTHENTICATE: "Basic realm=\"Hass.io Authentication\""
|
|
||||||
})
|
|
@@ -1,34 +0,0 @@
|
|||||||
"""Init file for Hass.io hardware RESTful API."""
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from .utils import api_process
|
|
||||||
from ..const import (
|
|
||||||
ATTR_SERIAL, ATTR_DISK, ATTR_GPIO, ATTR_AUDIO, ATTR_INPUT, ATTR_OUTPUT)
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class APIHardware(CoreSysAttributes):
|
|
||||||
"""Handle RESTful API for hardware functions."""
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def info(self, request):
|
|
||||||
"""Show hardware info."""
|
|
||||||
return {
|
|
||||||
ATTR_SERIAL: list(self.sys_hardware.serial_devices),
|
|
||||||
ATTR_INPUT: list(self.sys_hardware.input_devices),
|
|
||||||
ATTR_DISK: list(self.sys_hardware.disk_devices),
|
|
||||||
ATTR_GPIO: list(self.sys_hardware.gpio_devices),
|
|
||||||
ATTR_AUDIO: self.sys_hardware.audio_devices,
|
|
||||||
}
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def audio(self, request):
|
|
||||||
"""Show ALSA audio devices."""
|
|
||||||
return {
|
|
||||||
ATTR_AUDIO: {
|
|
||||||
ATTR_INPUT: self.sys_host.alsa.input_devices,
|
|
||||||
ATTR_OUTPUT: self.sys_host.alsa.output_devices,
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,57 +0,0 @@
|
|||||||
"""Init file for Hass.io HassOS RESTful API."""
|
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
from typing import Any, Awaitable, Dict
|
|
||||||
|
|
||||||
import voluptuous as vol
|
|
||||||
from aiohttp import web
|
|
||||||
|
|
||||||
from ..const import (
|
|
||||||
ATTR_BOARD,
|
|
||||||
ATTR_VERSION,
|
|
||||||
ATTR_VERSION_CLI,
|
|
||||||
ATTR_VERSION_CLI_LATEST,
|
|
||||||
ATTR_VERSION_LATEST,
|
|
||||||
)
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
from .utils import api_process, api_validate
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})
|
|
||||||
|
|
||||||
|
|
||||||
class APIHassOS(CoreSysAttributes):
|
|
||||||
"""Handle RESTful API for HassOS functions."""
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def info(self, request: web.Request) -> Dict[str, Any]:
|
|
||||||
"""Return HassOS information."""
|
|
||||||
return {
|
|
||||||
ATTR_VERSION: self.sys_hassos.version,
|
|
||||||
ATTR_VERSION_CLI: self.sys_hassos.version_cli,
|
|
||||||
ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
|
|
||||||
ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
|
|
||||||
ATTR_BOARD: self.sys_hassos.board,
|
|
||||||
}
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def update(self, request: web.Request) -> None:
|
|
||||||
"""Update HassOS."""
|
|
||||||
body = await api_validate(SCHEMA_VERSION, request)
|
|
||||||
version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)
|
|
||||||
|
|
||||||
await asyncio.shield(self.sys_hassos.update(version))
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def update_cli(self, request: web.Request) -> None:
|
|
||||||
"""Update HassOS CLI."""
|
|
||||||
body = await api_validate(SCHEMA_VERSION, request)
|
|
||||||
version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)
|
|
||||||
|
|
||||||
await asyncio.shield(self.sys_hassos.update_cli(version))
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
def config_sync(self, request: web.Request) -> Awaitable[None]:
|
|
||||||
"""Trigger config reload on HassOS."""
|
|
||||||
return asyncio.shield(self.sys_hassos.config_sync())
|
|
@@ -1,28 +0,0 @@
|
|||||||
"""Init file for Hass.io info RESTful API."""
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from ..const import (ATTR_ARCH, ATTR_CHANNEL, ATTR_HASSOS, ATTR_HOMEASSISTANT,
|
|
||||||
ATTR_HOSTNAME, ATTR_MACHINE, ATTR_SUPERVISOR,
|
|
||||||
ATTR_SUPPORTED_ARCH)
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
from .utils import api_process
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class APIInfo(CoreSysAttributes):
|
|
||||||
"""Handle RESTful API for info functions."""
|
|
||||||
|
|
||||||
@api_process
|
|
||||||
async def info(self, request):
|
|
||||||
"""Show system info."""
|
|
||||||
return {
|
|
||||||
ATTR_SUPERVISOR: self.sys_supervisor.version,
|
|
||||||
ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
|
|
||||||
ATTR_HASSOS: self.sys_hassos.version,
|
|
||||||
ATTR_HOSTNAME: self.sys_host.info.hostname,
|
|
||||||
ATTR_MACHINE: self.sys_machine,
|
|
||||||
ATTR_ARCH: self.sys_arch.default,
|
|
||||||
ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
|
|
||||||
ATTR_CHANNEL: self.sys_updater.channel,
|
|
||||||
}
|
|
File diff suppressed because one or more lines are too long
@@ -1,32 +0,0 @@
|
|||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
|
||||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
|
||||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
|
||||||
Code distributed by Google as part of the polymer project is also
|
|
||||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* @fileoverview
|
|
||||||
* @suppress {checkPrototypalTypes}
|
|
||||||
* @license Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
|
||||||
* This code may only be used under the BSD style license found at
|
|
||||||
* http://polymer.github.io/LICENSE.txt The complete set of authors may be found
|
|
||||||
* at http://polymer.github.io/AUTHORS.txt The complete set of contributors may
|
|
||||||
* be found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by
|
|
||||||
* Google as part of the polymer project is also subject to an additional IP
|
|
||||||
* rights grant found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
@license
|
|
||||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
|
||||||
This code may only be used under the BSD style license found at
|
|
||||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
|
||||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
|
||||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
|
||||||
part of the polymer project is also subject to an additional IP rights grant
|
|
||||||
found at http://polymer.github.io/PATENTS.txt
|
|
||||||
*/
|
|
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.1ac383635811d6c2cb4b.js","sourceRoot":""}
|
|
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
{"version":3,"sources":[],"names":[],"mappings":"","file":"chunk.31b41b04602ce627ad98.js","sourceRoot":""}
|
|
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
(window.webpackJsonp=window.webpackJsonp||[]).push([[4],{114:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(104),i=t.n(e),o=t(106),u=t.n(o),a=i.a,c=u.a}}]);
|
|
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
|
|||||||
!function(e){function n(n){for(var t,o,i=n[0],a=n[1],u=0,f=[];u<i.length;u++)o=i[u],r[o]&&f.push(r[o][0]),r[o]=0;for(t in a)Object.prototype.hasOwnProperty.call(a,t)&&(e[t]=a[t]);for(c&&c(n);f.length;)f.shift()()}var t={},r={1:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var i=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=i);var a,u=document.createElement("script");u.charset="utf-8",u.timeout=120,o.nc&&u.setAttribute("nonce",o.nc),u.src=function(e){return o.p+"chunk."+{0:"1ac383635811d6c2cb4b",2:"381b1e7d41316cfb583c",3:"a6e3bc73416702354e6d",4:"8a4a3a3274af0f09d86b",5:"7589a9f39a552ee63688",6:"31b41b04602ce627ad98",7:"ff45557361d5d6bd46af"}[e]+".js"}(e),a=function(n){u.onerror=u.onload=null,clearTimeout(c);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),i=n&&n.target&&n.target.src,a=new Error("Loading chunk "+e+" failed.\n("+o+": "+i+")");a.type=o,a.request=i,t[1](a)}r[e]=void 0}};var c=setTimeout(function(){a({type:"timeout",target:u})},12e4);u.onerror=u.onload=a,document.head.appendChild(u)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var i=window.webpackJsonp=window.webpackJsonp||[],a=i.push.bind(i);i.push=n,i=i.slice();for(var u=0;u<i.length;u++)n(i[u]);var c=a;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(2)]).then(t.bind(null,2)),Promise.all([t.e(0),t.e(6),t.e(3)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1,38 +0,0 @@
<!doctype html>
<html>
<head>
  <meta charset="utf-8">
  <title>Hass.io</title>
  <meta name='viewport' content='width=device-width, user-scalable=no'>
  <style>
    body {
      height: 100vh;
      margin: 0;
      padding: 0;
    }
  </style>
  <script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
</head>
<body>
  <hassio-app></hassio-app>
  <script>
    function addScript(src) {
      var e = document.createElement('script');
      e.src = src;
      document.write(e.outerHTML);
    }
    var webComponentsSupported = (
      'customElements' in window &&
      'import' in document.createElement('link') &&
      'content' in document.createElement('template'));
    if (!webComponentsSupported) {
      addScript('/static/webcomponents-lite.js');
    }
  </script>
  <!--
    Disabled while we make Home Assistant able to serve the right files.
    <script src="./app.js"></script>
  -->
  <link rel='import' href='./hassio-app.html'>
</body>
</html>
Binary file not shown.
@@ -1,95 +0,0 @@
"""Init file for Hass.io util for RESTful API."""
import json
import logging

from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..const import (
    JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
    CONTENT_TYPE_BINARY)
from ..exceptions import HassioError, APIError, APIForbidden

_LOGGER = logging.getLogger(__name__)


def json_loads(data):
    """Extract json from string with support for '' and None."""
    if not data:
        return {}
    try:
        return json.loads(data)
    except json.JSONDecodeError:
        raise APIError("Invalid json")


def api_process(method):
    """Wrap function with true/false calls to rest api."""
    async def wrap_api(api, *args, **kwargs):
        """Return API information."""
        try:
            answer = await method(api, *args, **kwargs)
        except (APIError, APIForbidden) as err:
            return api_return_error(message=str(err))
        except HassioError:
            return api_return_error(message="Unknown Error, see logs")

        if isinstance(answer, dict):
            return api_return_ok(data=answer)
        if isinstance(answer, web.Response):
            return answer
        elif isinstance(answer, bool) and not answer:
            return api_return_error()
        return api_return_ok()

    return wrap_api


def api_process_raw(content):
    """Wrap content_type into function."""
    def wrap_method(method):
        """Wrap function with raw output to rest api."""
        async def wrap_api(api, *args, **kwargs):
            """Return api information."""
            try:
                msg_data = await method(api, *args, **kwargs)
                msg_type = content
            except (APIError, APIForbidden) as err:
                msg_data = str(err).encode()
                msg_type = CONTENT_TYPE_BINARY
            except HassioError:
                msg_data = b''
                msg_type = CONTENT_TYPE_BINARY

            return web.Response(body=msg_data, content_type=msg_type)

        return wrap_api
    return wrap_method


def api_return_error(message=None):
    """Return an API error message."""
    return web.json_response({
        JSON_RESULT: RESULT_ERROR,
        JSON_MESSAGE: message,
    }, status=400)


def api_return_ok(data=None):
    """Return an API ok answer."""
    return web.json_response({
        JSON_RESULT: RESULT_OK,
        JSON_DATA: data or {},
    })


async def api_validate(schema, request):
    """Validate request data with schema."""
    data = await request.json(loads=json_loads)
    try:
        data = schema(data)
    except vol.Invalid as ex:
        raise APIError(humanize_error(data, ex)) from None

    return data
@@ -1,49 +0,0 @@
{
    "raspberrypi": [
        "armhf"
    ],
    "raspberrypi2": [
        "armv7",
        "armhf"
    ],
    "raspberrypi3": [
        "armv7",
        "armhf"
    ],
    "raspberrypi3-64": [
        "aarch64",
        "armv7",
        "armhf"
    ],
    "tinker": [
        "armv7",
        "armhf"
    ],
    "odroid-c2": [
        "aarch64"
    ],
    "odroid-xu": [
        "armv7",
        "armhf"
    ],
    "orangepi-prime": [
        "aarch64"
    ],
    "qemux86": [
        "i386"
    ],
    "qemux86-64": [
        "amd64",
        "i386"
    ],
    "qemuarm": [
        "armhf"
    ],
    "qemuarm-64": [
        "aarch64"
    ],
    "intel-nuc": [
        "amd64",
        "i386"
    ]
}
@@ -1,95 +0,0 @@
"""Manage SSO for Add-ons with Home Assistant user."""
import logging
import hashlib

from .const import (
    FILE_HASSIO_AUTH, ATTR_PASSWORD, ATTR_USERNAME, ATTR_ADDON)
from .coresys import CoreSysAttributes
from .utils.json import JsonConfig
from .validate import SCHEMA_AUTH_CONFIG
from .exceptions import AuthError, HomeAssistantAPIError

_LOGGER = logging.getLogger(__name__)


class Auth(JsonConfig, CoreSysAttributes):
    """Manage SSO for Add-ons with Home Assistant user."""

    def __init__(self, coresys):
        """Initialize Hass.io auth."""
        super().__init__(FILE_HASSIO_AUTH, SCHEMA_AUTH_CONFIG)
        self.coresys = coresys

    def _check_cache(self, username, password):
        """Check password in cache."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) == password_h:
            _LOGGER.info("Cache hit for %s", username)
            return True

        _LOGGER.warning("No cache hit for %s", username)
        return False

    def _update_cache(self, username, password):
        """Cache a username, password."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) == password_h:
            return

        self._data[username_h] = password_h
        self.save_data()

    def _dismatch_cache(self, username, password):
        """Remove user from cache."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) != password_h:
            return

        self._data.pop(username_h, None)
        self.save_data()

    async def check_login(self, addon, username, password):
        """Check username login."""
        if password is None:
            _LOGGER.error("None as password is not supported!")
            raise AuthError()
        _LOGGER.info("Auth request from %s for %s", addon.slug, username)

        # Check API state
        if not await self.sys_homeassistant.check_api_state():
            _LOGGER.info("Home Assistant not running, check cache")
            return self._check_cache(username, password)

        try:
            async with self.sys_homeassistant.make_request(
                    'post', 'api/hassio_auth', json={
                        ATTR_USERNAME: username,
                        ATTR_PASSWORD: password,
                        ATTR_ADDON: addon.slug,
                    }) as req:

                if req.status == 200:
                    _LOGGER.info("Success login from %s", username)
                    self._update_cache(username, password)
                    return True

                _LOGGER.warning("Wrong login from %s", username)
                self._dismatch_cache(username, password)
                return False
        except HomeAssistantAPIError:
            _LOGGER.error("Can't request auth on Home Assistant!")

        raise AuthError()


def _rehash(value, salt2=""):
    """Rehash a value."""
    for idx in range(1, 20):
        value = hashlib.sha256(f"{value}{idx}{salt2}".encode()).hexdigest()
    return value
@@ -1,208 +0,0 @@
"""Bootstrap Hass.io."""
import logging
import os
from pathlib import Path
import shutil
import signal

from colorlog import ColoredFormatter

from .addons import AddonManager
from .api import RestAPI
from .arch import CpuArch
from .auth import Auth
from .const import SOCKET_DOCKER
from .core import HassIO
from .coresys import CoreSys
from .dbus import DBusManager
from .discovery import Discovery
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .ingress import Ingress
from .services import ServiceManager
from .snapshots import SnapshotManager
from .supervisor import Supervisor
from .tasks import Tasks
from .updater import Updater

_LOGGER = logging.getLogger(__name__)

ENV_SHARE = "SUPERVISOR_SHARE"
ENV_NAME = "SUPERVISOR_NAME"
ENV_REPO = "HOMEASSISTANT_REPOSITORY"

MACHINE_ID = Path("/etc/machine-id")


async def initialize_coresys():
    """Initialize HassIO coresys/objects."""
    coresys = CoreSys()

    # Initialize core objects
    coresys.core = HassIO(coresys)
    coresys.arch = CpuArch(coresys)
    coresys.auth = Auth(coresys)
    coresys.updater = Updater(coresys)
    coresys.api = RestAPI(coresys)
    coresys.supervisor = Supervisor(coresys)
    coresys.homeassistant = HomeAssistant(coresys)
    coresys.addons = AddonManager(coresys)
    coresys.snapshots = SnapshotManager(coresys)
    coresys.host = HostManager(coresys)
    coresys.ingress = Ingress(coresys)
    coresys.tasks = Tasks(coresys)
    coresys.services = ServiceManager(coresys)
    coresys.discovery = Discovery(coresys)
    coresys.dbus = DBusManager(coresys)
    coresys.hassos = HassOS(coresys)

    # bootstrap config
    initialize_system_data(coresys)

    # Set Machine/Host ID
    if MACHINE_ID.exists():
        coresys.machine_id = MACHINE_ID.read_text().strip()

    return coresys


def initialize_system_data(coresys):
    """Set up the default configuration and create folders."""
    config = coresys.config

    # Home Assistant configuration folder
    if not config.path_homeassistant.is_dir():
        _LOGGER.info(
            "Create Home Assistant configuration folder %s", config.path_homeassistant
        )
        config.path_homeassistant.mkdir()

    # hassio ssl folder
    if not config.path_ssl.is_dir():
        _LOGGER.info("Create Hass.io SSL/TLS folder %s", config.path_ssl)
        config.path_ssl.mkdir()

    # hassio addon data folder
    if not config.path_addons_data.is_dir():
        _LOGGER.info("Create Hass.io Add-on data folder %s", config.path_addons_data)
        config.path_addons_data.mkdir(parents=True)

    if not config.path_addons_local.is_dir():
        _LOGGER.info(
            "Create Hass.io Add-on local repository folder %s", config.path_addons_local
        )
        config.path_addons_local.mkdir(parents=True)

    if not config.path_addons_git.is_dir():
        _LOGGER.info(
            "Create Hass.io Add-on git repositories folder %s", config.path_addons_git
        )
        config.path_addons_git.mkdir(parents=True)

    # hassio tmp folder
    if not config.path_tmp.is_dir():
        _LOGGER.info("Create Hass.io temp folder %s", config.path_tmp)
        config.path_tmp.mkdir(parents=True)

    # hassio backup folder
    if not config.path_backup.is_dir():
        _LOGGER.info("Create Hass.io backup folder %s", config.path_backup)
        config.path_backup.mkdir()

    # share folder
    if not config.path_share.is_dir():
        _LOGGER.info("Create Hass.io share folder %s", config.path_share)
        config.path_share.mkdir()

    # apparmor folder
    if not config.path_apparmor.is_dir():
        _LOGGER.info("Create Hass.io Apparmor folder %s", config.path_apparmor)
        config.path_apparmor.mkdir()

    return config


def migrate_system_env(coresys):
    """Cleanup some stuff after update."""
    config = coresys.config

    # hass.io 0.37 -> 0.38
    old_build = Path(config.path_hassio, "addons/build")
    if old_build.is_dir():
        try:
            old_build.rmdir()
        except OSError:
            _LOGGER.warning("Can't cleanup old Add-on build directory")


def initialize_logging():
    """Setup the logging."""
    logging.basicConfig(level=logging.INFO)
    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    colorfmt = f"%(log_color)s{fmt}%(reset)s"
    datefmt = "%y-%m-%d %H:%M:%S"

    # suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    logging.getLogger().handlers[0].setFormatter(
        ColoredFormatter(
            colorfmt,
            datefmt=datefmt,
            reset=True,
            log_colors={
                "DEBUG": "cyan",
                "INFO": "green",
                "WARNING": "yellow",
                "ERROR": "red",
                "CRITICAL": "red",
            },
        )
    )


def check_environment():
    """Check if all needed environment variables exist."""
    # check environment variables
    for key in (ENV_SHARE, ENV_NAME, ENV_REPO):
        try:
            os.environ[key]
        except KeyError:
            _LOGGER.fatal("Can't find %s in env!", key)
            return False

    # check docker socket
    if not SOCKET_DOCKER.is_socket():
        _LOGGER.fatal("Can't find Docker socket!")
        return False

    # check socat exec
    if not shutil.which("socat"):
        _LOGGER.fatal("Can't find socat!")
        return False

    # check gdbus exec
    if not shutil.which("gdbus"):
        _LOGGER.fatal("Can't find gdbus!")
        return False

    return True


def reg_signal(loop):
    """Register SIGTERM, SIGHUP and SIGINT to stop the system."""
    try:
        loop.add_signal_handler(signal.SIGTERM, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGTERM")

    try:
        loop.add_signal_handler(signal.SIGHUP, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGHUP")

    try:
        loop.add_signal_handler(signal.SIGINT, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGINT")
hassio/config.py
@@ -1,206 +0,0 @@
"""Core configuration for Hass.io."""
from datetime import datetime
import logging
import os
import re
from pathlib import Path, PurePath

import pytz

from .const import (
    FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
    ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
from .utils.dt import parse_datetime
from .utils.json import JsonConfig
from .validate import SCHEMA_HASSIO_CONFIG

_LOGGER = logging.getLogger(__name__)

HOMEASSISTANT_CONFIG = PurePath('homeassistant')

HASSIO_SSL = PurePath("ssl")

ADDONS_CORE = PurePath("addons/core")
ADDONS_LOCAL = PurePath("addons/local")
ADDONS_GIT = PurePath("addons/git")
ADDONS_DATA = PurePath("addons/data")

BACKUP_DATA = PurePath("backup")
SHARE_DATA = PurePath("share")
TMP_DATA = PurePath("tmp")
APPARMOR_DATA = PurePath("apparmor")

DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()

RE_TIMEZONE = re.compile(r"time_zone: (?P<timezone>[\w/\-+]+)")


class CoreConfig(JsonConfig):
    """Hold all core config data."""

    def __init__(self):
        """Initialize config object."""
        super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)

    @property
    def timezone(self):
        """Return system timezone."""
        config_file = Path(self.path_homeassistant, 'configuration.yaml')
        try:
            assert config_file.exists()
            configuration = config_file.read_text()

            data = RE_TIMEZONE.search(configuration)
            assert data

            timezone = data.group('timezone')
            pytz.timezone(timezone)
        except (pytz.exceptions.UnknownTimeZoneError, OSError, AssertionError):
            _LOGGER.debug("Can't parse Home Assistant timezone")
            return self._data[ATTR_TIMEZONE]

        return timezone

    @timezone.setter
    def timezone(self, value):
        """Set system timezone."""
        self._data[ATTR_TIMEZONE] = value

    @property
    def wait_boot(self):
        """Return wait time for auto boot stages."""
        return self._data[ATTR_WAIT_BOOT]

    @wait_boot.setter
    def wait_boot(self, value):
        """Set wait boot time."""
        self._data[ATTR_WAIT_BOOT] = value

    @property
    def last_boot(self):
        """Return last boot datetime."""
        boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)

        boot_time = parse_datetime(boot_str)
        if not boot_time:
            return datetime.utcfromtimestamp(1)
        return boot_time

    @last_boot.setter
    def last_boot(self, value):
        """Set last boot datetime."""
        self._data[ATTR_LAST_BOOT] = value.isoformat()

    @property
    def path_hassio(self):
        """Return Hass.io data path."""
        return HASSIO_DATA

    @property
    def path_extern_hassio(self):
        """Return Hass.io data path external for Docker."""
        return PurePath(os.environ['SUPERVISOR_SHARE'])

    @property
    def path_extern_homeassistant(self):
        """Return config path external for Docker."""
        return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))

    @property
    def path_homeassistant(self):
        """Return config path inside supervisor."""
        return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)

    @property
    def path_extern_ssl(self):
        """Return SSL path external for Docker."""
        return str(PurePath(self.path_extern_hassio, HASSIO_SSL))

    @property
    def path_ssl(self):
        """Return SSL path inside supervisor."""
        return Path(HASSIO_DATA, HASSIO_SSL)

    @property
    def path_addons_core(self):
        """Return git path for core Add-ons."""
        return Path(HASSIO_DATA, ADDONS_CORE)

    @property
    def path_addons_git(self):
        """Return path for Git Add-on."""
        return Path(HASSIO_DATA, ADDONS_GIT)

    @property
    def path_addons_local(self):
        """Return path for custom Add-ons."""
        return Path(HASSIO_DATA, ADDONS_LOCAL)

    @property
    def path_extern_addons_local(self):
        """Return path for custom Add-ons."""
        return PurePath(self.path_extern_hassio, ADDONS_LOCAL)

    @property
    def path_addons_data(self):
        """Return root Add-on data folder."""
        return Path(HASSIO_DATA, ADDONS_DATA)

    @property
    def path_extern_addons_data(self):
        """Return root add-on data folder external for Docker."""
        return PurePath(self.path_extern_hassio, ADDONS_DATA)

    @property
    def path_tmp(self):
        """Return Hass.io temp folder."""
        return Path(HASSIO_DATA, TMP_DATA)

    @property
    def path_extern_tmp(self):
        """Return Hass.io temp folder for Docker."""
        return PurePath(self.path_extern_hassio, TMP_DATA)

    @property
    def path_backup(self):
        """Return root backup data folder."""
        return Path(HASSIO_DATA, BACKUP_DATA)

    @property
    def path_extern_backup(self):
        """Return root backup data folder external for Docker."""
        return PurePath(self.path_extern_hassio, BACKUP_DATA)

    @property
    def path_share(self):
        """Return root share data folder."""
        return Path(HASSIO_DATA, SHARE_DATA)

    @property
    def path_apparmor(self):
        """Return root Apparmor profile folder."""
        return Path(HASSIO_DATA, APPARMOR_DATA)

    @property
    def path_extern_share(self):
        """Return root share data folder external for Docker."""
        return PurePath(self.path_extern_hassio, SHARE_DATA)

    @property
    def addons_repositories(self):
        """Return list of custom Add-on repositories."""
        return self._data[ATTR_ADDONS_CUSTOM_LIST]

    def add_addon_repository(self, repo):
        """Add a custom repository to list."""
        if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)

    def drop_addon_repository(self, repo):
        """Remove a custom repository from list."""
        if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
hassio/core.py
@@ -1,155 +0,0 @@
"""Main file for Hass.io."""
from contextlib import suppress
import asyncio
import logging

import async_timeout

from .coresys import CoreSysAttributes
from .const import (
    STARTUP_SYSTEM,
    STARTUP_SERVICES,
    STARTUP_APPLICATION,
    STARTUP_INITIALIZE,
)
from .exceptions import HassioError, HomeAssistantError

_LOGGER = logging.getLogger(__name__)


class HassIO(CoreSysAttributes):
    """Main object of Hass.io."""

    def __init__(self, coresys):
        """Initialize Hass.io object."""
        self.coresys = coresys

    async def setup(self):
        """Setup HassIO orchestration."""
        # Load Supervisor
        await self.sys_supervisor.load()

        # Load DBus
        await self.sys_dbus.load()

        # Load Host
        await self.sys_host.load()

        # Load Home Assistant
        await self.sys_homeassistant.load()

        # Load CPU/Arch
        await self.sys_arch.load()

        # Load HassOS
        await self.sys_hassos.load()

        # Load Add-ons
        await self.sys_addons.load()

        # rest api views
        await self.sys_api.load()

        # load last available data
        await self.sys_updater.load()

        # load last available data
        await self.sys_snapshots.load()

        # load services
        await self.sys_services.load()

        # Load discovery
        await self.sys_discovery.load()

        # Load ingress
        await self.sys_ingress.load()

        # start dns forwarding
        self.sys_create_task(self.sys_dns.start())

    async def start(self):
        """Start Hass.io orchestration."""
        # on release channel, try update itself
        if self.sys_supervisor.need_update:
            if self.sys_dev:
                _LOGGER.warning("Ignore Hass.io updates on dev!")
            elif await self.sys_supervisor.update():
                return

        # start api
        await self.sys_api.start()

        # start addon mark as initialize
        await self.sys_addons.boot(STARTUP_INITIALIZE)

        try:
            # HomeAssistant is already running / supervisor was only rebooted
            if self.sys_hardware.last_boot == self.sys_config.last_boot:
                _LOGGER.info("Hass.io reboot detected")
                return

            # reset register services / discovery
            self.sys_services.reset()

            # start addon mark as system
            await self.sys_addons.boot(STARTUP_SYSTEM)

            # start addon mark as services
            await self.sys_addons.boot(STARTUP_SERVICES)

            # run HomeAssistant
            if self.sys_homeassistant.boot:
                with suppress(HomeAssistantError):
                    await self.sys_homeassistant.start()

            # start addon mark as application
            await self.sys_addons.boot(STARTUP_APPLICATION)

            # store new last boot
            self.sys_config.last_boot = self.sys_hardware.last_boot
            self.sys_config.save_data()

        finally:
            # Add core tasks into scheduler
            await self.sys_tasks.load()

            # If landingpage / run upgrade in background
            if self.sys_homeassistant.version == "landingpage":
                self.sys_create_task(self.sys_homeassistant.install())

            _LOGGER.info("Hass.io is up and running")

    async def stop(self):
        """Stop a running orchestration."""
        # don't process scheduler anymore
        self.sys_scheduler.suspend = True

        # process async stop tasks
        try:
            with async_timeout.timeout(10):
                await asyncio.wait(
                    [
                        self.sys_api.stop(),
                        self.sys_dns.stop(),
                        self.sys_websession.close(),
                        self.sys_websession_ssl.close(),
                        self.sys_ingress.unload(),
                    ]
                )
        except asyncio.TimeoutError:
            _LOGGER.warning("Force Shutdown!")

        _LOGGER.info("Hass.io is down")

    async def shutdown(self):
        """Shutdown all running containers in correct order."""
        await self.sys_addons.shutdown(STARTUP_APPLICATION)

        # Close Home Assistant
        with suppress(HassioError):
            await self.sys_homeassistant.stop()

        await self.sys_addons.shutdown(STARTUP_SERVICES)
        await self.sys_addons.shutdown(STARTUP_SYSTEM)
        await self.sys_addons.shutdown(STARTUP_INITIALIZE)
@@ -1,39 +0,0 @@
"""D-Bus interface objects."""

from .systemd import Systemd
from .hostname import Hostname
from .rauc import Rauc
from ..coresys import CoreSysAttributes


class DBusManager(CoreSysAttributes):
    """A DBus Interface handler."""

    def __init__(self, coresys):
        """Initialize D-Bus interface."""
        self.coresys = coresys

        self._systemd = Systemd()
        self._hostname = Hostname()
        self._rauc = Rauc()

    @property
    def systemd(self):
        """Return the systemd interface."""
        return self._systemd

    @property
    def hostname(self):
        """Return the hostname interface."""
        return self._hostname

    @property
    def rauc(self):
        """Return the rauc interface."""
        return self._rauc

    async def load(self):
        """Connect interfaces to D-Bus."""
        await self.systemd.connect()
        await self.hostname.connect()
        await self.rauc.connect()
@@ -1,39 +0,0 @@
"""D-Bus interface for hostname."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'org.freedesktop.hostname1'
DBUS_OBJECT = '/org/freedesktop/hostname1'


class Hostname(DBusInterface):
    """Handle D-Bus interface for hostname/system."""

    async def connect(self):
        """Connect to system's D-Bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to hostname")

    @dbus_connected
    def set_static_hostname(self, hostname):
        """Change local hostname.

        Return a coroutine.
        """
        return self.dbus.SetStaticHostname(hostname, False)

    @dbus_connected
    def get_properties(self):
        """Return local host information.

        Return a coroutine.
        """
        return self.dbus.get_properties(DBUS_NAME)
@@ -1,55 +0,0 @@
"""D-Bus interface for rauc."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'de.pengutronix.rauc'
DBUS_OBJECT = '/'


class Rauc(DBusInterface):
    """Handle D-Bus interface for rauc."""

    async def connect(self):
        """Connect to D-Bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to rauc")

    @dbus_connected
    def install(self, raucb_file):
        """Install rauc bundle file.

        Return a coroutine.
        """
        return self.dbus.Installer.Install(raucb_file)

    @dbus_connected
    def get_slot_status(self):
        """Get slot status.

        Return a coroutine.
        """
        return self.dbus.Installer.GetSlotStatus()

    @dbus_connected
    def get_properties(self):
        """Return rauc information.

        Return a coroutine.
        """
        return self.dbus.get_properties(f"{DBUS_NAME}.Installer")

    @dbus_connected
    def signal_completed(self):
        """Return a signal wrapper for completed signal.

        Return a coroutine.
        """
        return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
@@ -1,137 +0,0 @@
"""Init file for Hass.io Docker object."""
import logging
from contextlib import suppress
from typing import Any, Dict, Optional

import attr
import docker

from ..const import SOCKET_DOCKER
from ..exceptions import DockerAPIError
from .network import DockerNetwork

_LOGGER = logging.getLogger(__name__)


@attr.s(frozen=True)
class CommandReturn:
    """Return object from command run."""

    exit_code: int = attr.ib()
    output: bytes = attr.ib()


class DockerAPI:
    """Docker Hass.io wrapper.

    This class is not AsyncIO safe!
    """

    def __init__(self):
        """Initialize Docker base wrapper."""
        self.docker: docker.DockerClient = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version="auto", timeout=900
        )
        self.network: DockerNetwork = DockerNetwork(self.docker)

    @property
    def images(self) -> docker.models.images.ImageCollection:
        """Return API images."""
        return self.docker.images

    @property
    def containers(self) -> docker.models.containers.ContainerCollection:
        """Return API containers."""
        return self.docker.containers

    @property
    def api(self) -> docker.APIClient:
        """Return low-level API client."""
        return self.docker.api

    def run(
        self, image: str, **kwargs: Dict[str, Any]
    ) -> docker.models.containers.Container:
        """Create a Docker container and run it.

        Need run inside executor.
        """
        name = kwargs.get("name", image)
        network_mode = kwargs.get("network_mode")
        hostname = kwargs.get("hostname")

        # Setup network
        kwargs["dns_search"] = ["."]
        if network_mode:
            kwargs["dns"] = [str(self.network.supervisor)]
            kwargs["dns_opt"] = ["ndots:0"]
        else:
            kwargs["network"] = None

        # Create container
        try:
            container = self.docker.containers.create(
                image, use_config_proxy=False, **kwargs
            )
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't create container from %s: %s", name, err)
            raise DockerAPIError() from None

        # Attach network
        if not network_mode:
            alias = [hostname] if hostname else None
            try:
                self.network.attach_container(container, alias=alias)
            except DockerAPIError:
                _LOGGER.warning("Can't attach %s to hassio-net!", name)
            else:
                with suppress(DockerAPIError):
                    self.network.detach_default_bridge(container)

        # Run container
        try:
            container.start()
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't start %s: %s", name, err)
            raise DockerAPIError() from None

        # Update metadata
        with suppress(docker.errors.DockerException):
            container.reload()

        return container

    def run_command(
        self, image: str, command: Optional[str] = None, **kwargs: Dict[str, Any]
    ) -> CommandReturn:
        """Create a temporary container and run command.

        Need run inside executor.
        """
        stdout = kwargs.get("stdout", True)
        stderr = kwargs.get("stderr", True)

        _LOGGER.info("Run command '%s' on %s", command, image)
        try:
            container = self.docker.containers.run(
                image,
                command=command,
                network=self.network.name,
                use_config_proxy=False,
                **kwargs
            )

            # wait until command is done
            result = container.wait()
            output = container.logs(stdout=stdout, stderr=stderr)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't execute command: %s", err)
            raise DockerAPIError() from None

        finally:
            # cleanup container
            with suppress(docker.errors.DockerException):
                container.remove(force=True)

        return CommandReturn(result.get("StatusCode"), output)
@@ -1,38 +0,0 @@
"""HassOS Cli docker object."""
import logging

import docker

from ..coresys import CoreSysAttributes
from .interface import DockerInterface

_LOGGER = logging.getLogger(__name__)


class DockerHassOSCli(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for HassOS Cli."""

    @property
    def image(self):
        """Return name of HassOS CLI image."""
        return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"

    def _stop(self, remove_container=True):
        """Don't need stop."""
        return True

    def _attach(self):
        """Attach to running Docker container.

        Need run inside executor.
        """
        try:
            image = self.sys_docker.images.get(self.image)

        except docker.errors.DockerException:
            _LOGGER.warning("Can't find a HassOS CLI %s", self.image)

        else:
            self._meta = image.attrs
            _LOGGER.info(
                "Found HassOS CLI %s with version %s", self.image, self.version
            )
@@ -1,51 +0,0 @@
"""Init file for Hass.io Docker object."""
from ipaddress import IPv4Address
import logging
import os

import docker

from ..coresys import CoreSysAttributes
from ..exceptions import DockerAPIError
from .interface import DockerInterface

_LOGGER = logging.getLogger(__name__)


class DockerSupervisor(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for Supervisor."""

    @property
    def name(self) -> str:
        """Return name of Docker container."""
        return os.environ["SUPERVISOR_NAME"]

    @property
    def ip_address(self) -> IPv4Address:
        """Return IP address of this container."""
        return self.sys_docker.network.supervisor

    def _attach(self) -> None:
        """Attach to running docker container.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            raise DockerAPIError() from None

        self._meta = docker_container.attrs
        _LOGGER.info(
            "Attach to Supervisor %s with version %s", self.image, self.version
        )

        # If already attached
        if docker_container in self.sys_docker.network.containers:
            return

        # Attach to network
        _LOGGER.info("Connect Supervisor to Hass.io Network")
        self.sys_docker.network.attach_container(
            docker_container, alias=["hassio"], ipv4=self.sys_docker.network.supervisor
        )
@@ -1,554 +0,0 @@
|
|||||||
"""Home Assistant control object."""
|
|
||||||
import asyncio
|
|
||||||
from contextlib import asynccontextmanager, suppress
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from ipaddress import IPv4Address
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from pathlib import Path
|
|
||||||
import re
|
|
||||||
import secrets
|
|
||||||
import socket
|
|
||||||
import time
|
|
||||||
from typing import Any, AsyncContextManager, Awaitable, Dict, Optional
|
|
||||||
from uuid import UUID
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
from aiohttp import hdrs
|
|
||||||
import attr
|
|
||||||
|
|
||||||
from .const import (
|
|
||||||
ATTR_ACCESS_TOKEN,
|
|
||||||
ATTR_BOOT,
|
|
||||||
ATTR_IMAGE,
|
|
||||||
ATTR_LAST_VERSION,
|
|
||||||
ATTR_PASSWORD,
|
|
||||||
ATTR_PORT,
|
|
||||||
ATTR_REFRESH_TOKEN,
|
|
||||||
ATTR_SSL,
|
|
||||||
ATTR_UUID,
|
|
||||||
ATTR_WAIT_BOOT,
|
|
||||||
ATTR_WATCHDOG,
|
|
||||||
FILE_HASSIO_HOMEASSISTANT,
|
|
||||||
HEADER_HA_ACCESS,
|
|
||||||
)
|
|
||||||
from .coresys import CoreSys, CoreSysAttributes
|
|
||||||
from .docker.homeassistant import DockerHomeAssistant
|
|
||||||
from .docker.stats import DockerStats
|
|
||||||
from .exceptions import (
|
|
||||||
DockerAPIError,
|
|
||||||
HomeAssistantAPIError,
|
|
||||||
HomeAssistantAuthError,
|
|
||||||
HomeAssistantError,
|
|
||||||
HomeAssistantUpdateError,
|
|
||||||
)
|
|
||||||
from .utils import convert_to_ascii, process_lock
|
|
||||||
from .utils.json import JsonConfig
|
|
||||||
from .validate import SCHEMA_HASS_CONFIG
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
|
|
||||||
|
|
||||||
|
|
||||||
@attr.s(frozen=True)
|
|
||||||
class ConfigResult:
|
|
||||||
"""Return object from config check."""
|
|
||||||
valid = attr.ib()
|
|
||||||
log = attr.ib()
|
|
||||||
|
|
||||||
|
|
||||||
class HomeAssistant(JsonConfig, CoreSysAttributes):
|
|
||||||
"""Home Assistant core object for handle it."""
|
|
||||||
|
|
||||||
def __init__(self, coresys: CoreSys):
|
|
||||||
"""Initialize Home Assistant object."""
|
|
||||||
super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
|
|
||||||
self.coresys: CoreSys = coresys
|
|
||||||
self.instance: DockerHomeAssistant = DockerHomeAssistant(coresys)
|
|
||||||
self.lock: asyncio.Lock = asyncio.Lock(loop=coresys.loop)
|
|
||||||
self._error_state: bool = False
|
|
||||||
|
|
||||||
# We don't persist access tokens. Instead we fetch new ones when needed
|
|
||||||
self.access_token: Optional[str] = None
|
|
||||||
self._access_token_expires: Optional[datetime] = None
|
|
||||||
|
|
||||||
async def load(self) -> None:
|
|
||||||
"""Prepare Home Assistant object."""
|
|
||||||
with suppress(DockerAPIError):
|
|
||||||
await self.instance.attach()
|
|
||||||
return
|
|
||||||
|
|
||||||
_LOGGER.info("No Home Assistant Docker image %s found.", self.image)
|
|
||||||
await self.install_landingpage()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def machine(self) -> str:
|
|
||||||
"""Return the system machines."""
|
|
||||||
return self.instance.machine
|
|
||||||
|
|
||||||
@property
|
|
||||||
def arch(self) -> str:
|
|
||||||
"""Return arch of running Home Assistant."""
|
|
||||||
return self.instance.arch
|
|
||||||
|
|
||||||
@property
|
|
||||||
def error_state(self) -> bool:
|
|
||||||
"""Return True if system is in error."""
|
|
||||||
return self._error_state
|
|
||||||
|
|
||||||
@property
|
|
||||||
def ip_address(self) -> IPv4Address:
|
|
||||||
"""Return IP of Home Assistant instance."""
|
|
||||||
return self.instance.ip_address
|
|
||||||
|
|
||||||
@property
|
|
||||||
def api_port(self) -> int:
|
|
||||||
"""Return network port to Home Assistant instance."""
|
|
||||||
return self._data[ATTR_PORT]
|
|
||||||
|
|
||||||
@api_port.setter
|
|
||||||
def api_port(self, value: int) -> None:
|
|
||||||
"""Set network port for Home Assistant instance."""
|
|
||||||
self._data[ATTR_PORT] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def api_password(self) -> str:
|
|
||||||
"""Return password for Home Assistant instance."""
|
|
||||||
return self._data.get(ATTR_PASSWORD)
|
|
||||||
|
|
||||||
@api_password.setter
|
|
||||||
def api_password(self, value: str):
|
|
||||||
"""Set password for Home Assistant instance."""
|
|
||||||
self._data[ATTR_PASSWORD] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def api_ssl(self) -> bool:
|
|
||||||
"""Return if we need ssl to Home Assistant instance."""
|
|
||||||
return self._data[ATTR_SSL]
|
|
||||||
|
|
||||||
@api_ssl.setter
|
|
||||||
def api_ssl(self, value: bool):
|
|
||||||
"""Set SSL for Home Assistant instance."""
|
|
||||||
self._data[ATTR_SSL] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def api_url(self) -> str:
|
|
||||||
"""Return API url to Home Assistant."""
|
|
||||||
return "{}://{}:{}".format('https' if self.api_ssl else 'http',
|
|
||||||
self.ip_address, self.api_port)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def watchdog(self) -> bool:
|
|
||||||
"""Return True if the watchdog should protect Home Assistant."""
|
|
||||||
return self._data[ATTR_WATCHDOG]
|
|
||||||
|
|
||||||
@watchdog.setter
|
|
||||||
def watchdog(self, value: bool):
|
|
||||||
"""Return True if the watchdog should protect Home Assistant."""
|
|
||||||
self._data[ATTR_WATCHDOG] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def wait_boot(self) -> int:
|
|
||||||
"""Return time to wait for Home Assistant startup."""
|
|
||||||
return self._data[ATTR_WAIT_BOOT]
|
|
||||||
|
|
||||||
@wait_boot.setter
|
|
||||||
def wait_boot(self, value: int):
|
|
||||||
"""Set time to wait for Home Assistant startup."""
|
|
||||||
self._data[ATTR_WAIT_BOOT] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def version(self) -> str:
|
|
||||||
"""Return version of running Home Assistant."""
|
|
||||||
return self.instance.version
|
|
||||||
|
|
||||||
@property
|
|
||||||
def last_version(self) -> str:
|
|
||||||
"""Return last available version of Home Assistant."""
|
|
||||||
if self.is_custom_image:
|
|
||||||
return self._data.get(ATTR_LAST_VERSION)
|
|
||||||
return self.sys_updater.version_homeassistant
|
|
||||||
|
|
||||||
@last_version.setter
|
|
||||||
def last_version(self, value: str):
|
|
||||||
"""Set last available version of Home Assistant."""
|
|
||||||
if value:
|
|
||||||
self._data[ATTR_LAST_VERSION] = value
|
|
||||||
else:
|
|
||||||
self._data.pop(ATTR_LAST_VERSION, None)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def image(self) -> str:
|
|
||||||
"""Return image name of the Home Assistant container."""
|
|
||||||
if self._data.get(ATTR_IMAGE):
|
|
||||||
return self._data[ATTR_IMAGE]
|
|
||||||
return os.environ['HOMEASSISTANT_REPOSITORY']
|
|
||||||
|
|
||||||
@image.setter
|
|
||||||
def image(self, value: str):
|
|
||||||
"""Set image name of Home Assistant container."""
|
|
||||||
if value:
|
|
||||||
self._data[ATTR_IMAGE] = value
|
|
||||||
else:
|
|
||||||
self._data.pop(ATTR_IMAGE, None)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def is_custom_image(self) -> bool:
|
|
||||||
"""Return True if a custom image is used."""
|
|
||||||
return all(
|
|
||||||
attr in self._data for attr in (ATTR_IMAGE, ATTR_LAST_VERSION))
|
|
||||||
|
|
||||||
@property
|
|
||||||
def boot(self) -> bool:
|
|
||||||
"""Return True if Home Assistant boot is enabled."""
|
|
||||||
return self._data[ATTR_BOOT]
|
|
||||||
|
|
||||||
@boot.setter
|
|
||||||
def boot(self, value: bool):
|
|
||||||
"""Set Home Assistant boot options."""
|
|
||||||
self._data[ATTR_BOOT] = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def uuid(self) -> UUID:
|
|
||||||
"""Return a UUID of this Home Assistant instance."""
|
|
||||||
return self._data[ATTR_UUID]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def hassio_token(self) -> str:
|
|
||||||
"""Return an access token for the Hass.io API."""
|
|
||||||
return self._data.get(ATTR_ACCESS_TOKEN)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def refresh_token(self) -> str:
|
|
||||||
"""Return the refresh token to authenticate with Home Assistant."""
|
|
||||||
return self._data.get(ATTR_REFRESH_TOKEN)
|
|
||||||
|
|
||||||
@refresh_token.setter
|
|
||||||
def refresh_token(self, value: str):
|
|
||||||
"""Set Home Assistant refresh_token."""
|
|
||||||
self._data[ATTR_REFRESH_TOKEN] = value
|
|
||||||
|
|
||||||
@process_lock
|
|
||||||
async def install_landingpage(self) -> None:
|
|
||||||
"""Install a landing page."""
|
|
||||||
_LOGGER.info("Setup HomeAssistant landingpage")
|
|
||||||
while True:
|
|
||||||
with suppress(DockerAPIError):
|
|
||||||
await self.instance.install('landingpage')
|
|
||||||
return
|
|
||||||
_LOGGER.warning("Fails install landingpage, retry after 30sec")
|
|
||||||
await asyncio.sleep(30)
|
|
||||||
|
|
||||||
@process_lock
|
|
||||||
async def install(self) -> None:
|
|
||||||
"""Install a landing page."""
|
|
||||||
_LOGGER.info("Setup Home Assistant")
|
|
||||||
while True:
|
|
||||||
# read homeassistant tag and install it
|
|
||||||
if not self.last_version:
|
|
||||||
await self.sys_updater.reload()
|
|
||||||
|
|
||||||
tag = self.last_version
|
|
||||||
if tag:
|
|
||||||
with suppress(DockerAPIError):
|
|
||||||
await self.instance.install(tag)
|
|
||||||
break
|
|
||||||
_LOGGER.warning("Error on install Home Assistant. Retry in 30sec")
|
|
||||||
await asyncio.sleep(30)
|
|
||||||
|
|
||||||
# finishing
|
|
||||||
_LOGGER.info("Home Assistant docker now installed")
|
|
||||||
try:
|
|
||||||
if not self.boot:
|
|
||||||
return
|
|
||||||
_LOGGER.info("Start Home Assistant")
|
|
||||||
await self._start()
|
|
||||||
except HomeAssistantError:
|
|
||||||
_LOGGER.error("Can't start Home Assistant!")
|
|
||||||
finally:
|
|
||||||
with suppress(DockerAPIError):
|
|
||||||
await self.instance.cleanup()
|
|
||||||
|
|
||||||
@process_lock
|
|
||||||
async def update(self, version=None) -> None:
|
|
||||||
"""Update HomeAssistant version."""
|
|
||||||
version = version or self.last_version
|
|
||||||
rollback = self.version if not self.error_state else None
|
|
||||||
running = await self.instance.is_running()
|
|
||||||
exists = await self.instance.exists()
|
|
||||||
|
|
||||||
if exists and version == self.instance.version:
|
|
||||||
_LOGGER.warning("Version %s is already installed", version)
|
|
||||||
return
|
|
||||||
|
|
||||||
# process an update
|
|
||||||
async def _update(to_version):
|
|
||||||
"""Run Home Assistant update."""
|
|
||||||
_LOGGER.info("Update Home Assistant to version %s", to_version)
|
|
||||||
try:
|
|
||||||
await self.instance.update(to_version)
|
|
||||||
except DockerAPIError:
|
|
||||||
_LOGGER.warning("Update Home Assistant image fails")
|
|
||||||
raise HomeAssistantUpdateError() from None
|
|
||||||
|
|
||||||
if running:
|
|
||||||
await self._start()
|
|
||||||
_LOGGER.info("Successful run Home Assistant %s", to_version)
|
|
||||||
|
|
||||||
# Update Home Assistant
|
|
||||||
with suppress(HomeAssistantError):
|
|
||||||
await _update(version)
|
|
||||||
return
|
|
||||||
|
|
||||||
# Update going wrong, revert it
|
|
||||||
if self.error_state and rollback:
|
|
||||||
_LOGGER.fatal("HomeAssistant update fails -> rollback!")
|
|
||||||
await _update(rollback)
|
|
||||||
else:
|
|
||||||
raise HomeAssistantUpdateError()
|
|
||||||
|
|
||||||
async def _start(self) -> None:
|
|
||||||
"""Start Home Assistant Docker & wait."""
|
|
||||||
if await self.instance.is_running():
|
|
||||||
_LOGGER.warning("Home Assistant is already running!")
|
|
||||||
return
|
|
||||||
|
|
||||||
# Create new API token
|
|
||||||
self._data[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
|
|
||||||
self.save_data()
|
|
||||||
|
|
||||||
try:
|
|
||||||
await self.instance.run()
|
|
||||||
except DockerAPIError:
|
|
||||||
raise HomeAssistantError() from None
|
|
||||||
await self._block_till_run()
|
|
||||||
|
|
||||||
@process_lock
|
|
||||||
async def start(self) -> None:
|
|
||||||
"""Run Home Assistant docker."""
|
|
||||||
try:
|
|
||||||
if await self.instance.is_running():
|
|
||||||
await self.instance.restart()
|
|
||||||
elif await self.instance.is_initialize():
|
|
||||||
await self.instance.start()
|
|
||||||
else:
|
|
||||||
await self._start()
|
|
||||||
return
|
|
||||||
|
|
||||||
await self._block_till_run()
|
|
||||||
except DockerAPIError:
|
|
||||||
raise HomeAssistantError() from None
|
|
||||||
|
|
||||||
@process_lock
|
|
||||||
async def stop(self) -> None:
|
|
||||||
"""Stop Home Assistant Docker.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
return await self.instance.stop(remove_container=False)
|
|
||||||
except DockerAPIError:
|
|
||||||
raise HomeAssistantError() from None
|
|
||||||
|
|
||||||
@process_lock
|
|
||||||
async def restart(self) -> None:
|
|
||||||
"""Restart Home Assistant Docker."""
|
|
||||||
try:
|
|
||||||
await self.instance.restart()
|
|
||||||
except DockerAPIError:
|
|
||||||
raise HomeAssistantError() from None
|
|
||||||
|
|
||||||
await self._block_till_run()
|
|
||||||
|
|
||||||
@process_lock
|
|
||||||
async def rebuild(self) -> None:
|
|
||||||
"""Rebuild Home Assistant Docker container."""
|
|
||||||
with suppress(DockerAPIError):
|
|
||||||
await self.instance.stop()
|
|
||||||
await self._start()
|
|
||||||
|
|
||||||
def logs(self) -> Awaitable[bytes]:
|
|
||||||
"""Get HomeAssistant docker logs.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
return self.instance.logs()
|
|
||||||
|
|
||||||
async def stats(self) -> DockerStats:
|
|
||||||
"""Return stats of Home Assistant.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
return await self.instance.stats()
|
|
||||||
except DockerAPIError:
|
|
||||||
raise HomeAssistantError() from None
|
|
||||||
|
|
||||||
def is_running(self) -> Awaitable[bool]:
|
|
||||||
"""Return True if Docker container is running.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
return self.instance.is_running()
|
|
||||||
|
|
||||||
def is_fails(self) -> Awaitable[bool]:
|
|
||||||
"""Return True if a Docker container is fails state.
|
|
||||||
|
|
||||||
Return a coroutine.
|
|
||||||
"""
|
|
||||||
return self.instance.is_fails()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def in_progress(self) -> bool:
|
|
||||||
"""Return True if a task is in progress."""
|
|
||||||
return self.instance.in_progress or self.lock.locked()
|
|
||||||
|
|
||||||
async def check_config(self) -> ConfigResult:
|
|
||||||
"""Run Home Assistant config check."""
|
|
||||||
result = await self.instance.execute_command(
|
|
||||||
"python3 -m homeassistant -c /config --script check_config")
|
|
||||||
|
|
||||||
# if not valid
|
|
||||||
if result.exit_code is None:
|
|
||||||
_LOGGER.error("Fatal error on config check!")
|
|
||||||
raise HomeAssistantError()
|
|
||||||
|
|
||||||
# parse output
|
|
||||||
log = convert_to_ascii(result.output)
|
|
||||||
if result.exit_code != 0 or RE_YAML_ERROR.search(log):
|
|
||||||
_LOGGER.error("Invalid Home Assistant config found!")
|
|
||||||
return ConfigResult(False, log)
|
|
||||||
|
|
||||||
_LOGGER.info("Home Assistant config is valid")
|
|
||||||
return ConfigResult(True, log)
|
|
||||||
|
|
||||||
async def ensure_access_token(self) -> None:
|
|
||||||
"""Ensures there is an access token."""
|
|
||||||
if self.access_token is not None and self._access_token_expires > datetime.utcnow():
|
|
||||||
return
|
|
||||||
|
|
||||||
with suppress(asyncio.TimeoutError, aiohttp.ClientError):
|
|
||||||
async with self.sys_websession_ssl.post(
|
|
||||||
f"{self.api_url}/auth/token",
|
|
||||||
timeout=30,
|
|
||||||
data={
|
|
||||||
"grant_type": "refresh_token",
|
|
||||||
"refresh_token": self.refresh_token
|
|
||||||
}) as resp:
|
|
||||||
if resp.status != 200:
|
|
||||||
_LOGGER.error("Can't update Home Assistant access token!")
|
|
||||||
raise HomeAssistantAuthError()
|
|
||||||
|
|
||||||
_LOGGER.info("Updated Home Assistant API token")
|
|
||||||
tokens = await resp.json()
|
|
||||||
self.access_token = tokens['access_token']
|
|
||||||
self._access_token_expires = \
|
|
||||||
datetime.utcnow() + timedelta(seconds=tokens['expires_in'])
|
|
||||||
|
|
||||||
@asynccontextmanager
|
|
||||||
async def make_request(self,
|
|
||||||
method: str,
|
|
||||||
path: str,
|
|
||||||
json: Optional[Dict[str, Any]] = None,
|
|
||||||
content_type: Optional[str] = None,
|
|
||||||
data: Optional[bytes] = None,
|
|
||||||
timeout=30) -> AsyncContextManager[aiohttp.ClientResponse]:
|
|
||||||
"""Async context manager to make a request with right auth."""
|
|
||||||
url = f"{self.api_url}/{path}"
|
|
||||||
headers = {}
|
|
||||||
|
|
||||||
# Passthrough content type
|
|
||||||
if content_type is not None:
|
|
||||||
headers[hdrs.CONTENT_TYPE] = content_type
|
|
||||||
|
|
||||||
# Set old API Password
|
|
||||||
if self.api_password:
|
|
||||||
headers[HEADER_HA_ACCESS] = self.api_password
|
|
||||||
|
|
||||||
for _ in (1, 2):
|
|
||||||
# Prepare Access token
|
|
||||||
if self.refresh_token:
|
|
||||||
await self.ensure_access_token()
|
|
||||||
headers[hdrs.AUTHORIZATION] = f'Bearer {self.access_token}'
|
|
||||||
|
|
||||||
try:
|
|
||||||
async with getattr(self.sys_websession_ssl, method)(
|
|
||||||
url, data=data, timeout=timeout, json=json,
|
|
||||||
headers=headers) as resp:
|
|
||||||
# Access token expired
|
|
||||||
if resp.status == 401 and self.refresh_token:
|
|
||||||
self.access_token = None
|
|
||||||
continue
|
|
||||||
yield resp
|
|
||||||
return
|
|
||||||
except (asyncio.TimeoutError, aiohttp.ClientError) as err:
|
|
||||||
_LOGGER.error("Error on call %s: %s", url, err)
|
|
||||||
break
|
|
||||||
|
|
||||||
raise HomeAssistantAPIError()
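For context, a hedged usage sketch of make_request as an async context manager, mirroring how check_api_state below drives it; the 'api/states' path is only illustrative.

async def fetch_states(homeassistant):
    # Sketch: proxy a GET to the Core API with the right auth headers.
    async with homeassistant.make_request('get', 'api/states') as resp:
        if resp.status == 200:
            return await resp.json()
        return None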
|
|
||||||
|
|
||||||
async def check_api_state(self) -> bool:
|
|
||||||
"""Return True if Home Assistant up and running."""
|
|
||||||
with suppress(HomeAssistantAPIError):
|
|
||||||
async with self.make_request('get', 'api/') as resp:
|
|
||||||
if resp.status in (200, 201):
|
|
||||||
return True
|
|
||||||
status = resp.status
|
|
||||||
_LOGGER.warning("Home Assistant API config mismatch: %s", status)
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def _block_till_run(self) -> None:
|
|
||||||
"""Block until Home-Assistant is booting up or startup timeout."""
|
|
||||||
start_time = time.monotonic()
|
|
||||||
migration_progress = False
|
|
||||||
migration_file = Path(self.sys_config.path_homeassistant,
|
|
||||||
'.migration_progress')
|
|
||||||
|
|
||||||
def check_port():
|
|
||||||
"""Check if port is mapped."""
|
|
||||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
|
||||||
try:
|
|
||||||
result = sock.connect_ex((str(self.ip_address), self.api_port))
|
|
||||||
sock.close()
|
|
||||||
|
|
||||||
# Check if the port is available
|
|
||||||
if result == 0:
|
|
||||||
return True
|
|
||||||
except OSError:
|
|
||||||
pass
|
|
||||||
return False
|
|
||||||
|
|
||||||
while True:
|
|
||||||
await asyncio.sleep(5)
|
|
||||||
|
|
||||||
# 1: Check if the container is still running
|
|
||||||
if not await self.instance.is_running():
|
|
||||||
_LOGGER.error("Home Assistant has crashed!")
|
|
||||||
break
|
|
||||||
|
|
||||||
# 2: Check if the API port responds
|
|
||||||
if await self.sys_run_in_executor(check_port):
|
|
||||||
_LOGGER.info("Detect a running Home Assistant instance")
|
|
||||||
self._error_state = False
|
|
||||||
return
|
|
||||||
|
|
||||||
# 3: Running DB Migration
|
|
||||||
if migration_file.exists():
|
|
||||||
if not migration_progress:
|
|
||||||
migration_progress = True
|
|
||||||
_LOGGER.info("Home Assistant record migration in progress")
|
|
||||||
continue
|
|
||||||
elif migration_progress:
|
|
||||||
migration_progress = False # Reset start time
|
|
||||||
start_time = time.monotonic()
|
|
||||||
_LOGGER.info("Home Assistant record migration done")
|
|
||||||
|
|
||||||
# 4: Timeout
|
|
||||||
if time.monotonic() - start_time > self.wait_boot:
|
|
||||||
_LOGGER.warning("Don't wait anymore of Home Assistant startup!")
|
|
||||||
break
|
|
||||||
|
|
||||||
self._error_state = True
|
|
||||||
raise HomeAssistantError()
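A minimal standalone sketch of the TCP probe that check_port above relies on, assuming a hypothetical address and port; connect_ex returns 0 when a listener answers.

import socket

def port_open(host: str, port: int) -> bool:
    # Sketch: return True if something is listening on host:port.
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.settimeout(5)
    try:
        return sock.connect_ex((host, port)) == 0
    except OSError:
        return False
    finally:
        sock.close()

print(port_open("172.30.32.1", 8123))  # hypothetical address and port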
|
|
@@ -1,93 +0,0 @@
|
|||||||
"""Host function like audio, D-Bus or systemd."""
|
|
||||||
from contextlib import suppress
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from .alsa import AlsaAudio
|
|
||||||
from .apparmor import AppArmorControl
|
|
||||||
from .control import SystemControl
|
|
||||||
from .info import InfoCenter
|
|
||||||
from .services import ServiceManager
|
|
||||||
from ..const import (
|
|
||||||
FEATURES_REBOOT, FEATURES_SHUTDOWN, FEATURES_HOSTNAME, FEATURES_SERVICES,
|
|
||||||
FEATURES_HASSOS)
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
from ..exceptions import HassioError
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class HostManager(CoreSysAttributes):
|
|
||||||
"""Manage supported function from host."""
|
|
||||||
|
|
||||||
def __init__(self, coresys):
|
|
||||||
"""Initialize Host manager."""
|
|
||||||
self.coresys = coresys
|
|
||||||
self._alsa = AlsaAudio(coresys)
|
|
||||||
self._apparmor = AppArmorControl(coresys)
|
|
||||||
self._control = SystemControl(coresys)
|
|
||||||
self._info = InfoCenter(coresys)
|
|
||||||
self._services = ServiceManager(coresys)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def alsa(self):
|
|
||||||
"""Return host ALSA handler."""
|
|
||||||
return self._alsa
|
|
||||||
|
|
||||||
@property
|
|
||||||
def apparmor(self):
|
|
||||||
"""Return host AppArmor handler."""
|
|
||||||
return self._apparmor
|
|
||||||
|
|
||||||
@property
|
|
||||||
def control(self):
|
|
||||||
"""Return host control handler."""
|
|
||||||
return self._control
|
|
||||||
|
|
||||||
@property
|
|
||||||
def info(self):
|
|
||||||
"""Return host info handler."""
|
|
||||||
return self._info
|
|
||||||
|
|
||||||
@property
|
|
||||||
def services(self):
|
|
||||||
"""Return host services handler."""
|
|
||||||
return self._services
|
|
||||||
|
|
||||||
@property
|
|
||||||
def supported_features(self):
|
|
||||||
"""Return a list of supported host features."""
|
|
||||||
features = []
|
|
||||||
|
|
||||||
if self.sys_dbus.systemd.is_connected:
|
|
||||||
features.extend([
|
|
||||||
FEATURES_REBOOT,
|
|
||||||
FEATURES_SHUTDOWN,
|
|
||||||
FEATURES_SERVICES,
|
|
||||||
])
|
|
||||||
|
|
||||||
if self.sys_dbus.hostname.is_connected:
|
|
||||||
features.append(FEATURES_HOSTNAME)
|
|
||||||
|
|
||||||
if self.sys_hassos.available:
|
|
||||||
features.append(FEATURES_HASSOS)
|
|
||||||
|
|
||||||
return features
|
|
||||||
|
|
||||||
async def reload(self):
|
|
||||||
"""Reload host functions."""
|
|
||||||
if self.sys_dbus.hostname.is_connected:
|
|
||||||
await self.info.update()
|
|
||||||
|
|
||||||
if self.sys_dbus.systemd.is_connected:
|
|
||||||
await self.services.update()
|
|
||||||
|
|
||||||
async def load(self):
|
|
||||||
"""Load host information."""
|
|
||||||
with suppress(HassioError):
|
|
||||||
await self.reload()
|
|
||||||
|
|
||||||
# Load profile data
|
|
||||||
try:
|
|
||||||
await self.apparmor.load()
|
|
||||||
except HassioError as err:
|
|
||||||
_LOGGER.warning("Loading host AppArmor on startup failed: %s", err)
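A hedged usage sketch of HostManager: load host data once, then branch on the advertised features; the coresys object is assumed to be fully initialized.

async def show_host(coresys):
    host = HostManager(coresys)
    await host.load()
    # Only touch hostname data when the D-Bus hostname service is available.
    if FEATURES_HOSTNAME in host.supported_features:
        print("Hostname:", host.info.hostname)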
|
|
@@ -1,140 +0,0 @@
|
|||||||
"""Host Audio support."""
|
|
||||||
import logging
|
|
||||||
import json
|
|
||||||
from pathlib import Path
|
|
||||||
from string import Template
|
|
||||||
|
|
||||||
import attr
|
|
||||||
|
|
||||||
from ..const import (
|
|
||||||
ATTR_INPUT, ATTR_OUTPUT, ATTR_DEVICES, ATTR_NAME, CHAN_ID, CHAN_TYPE)
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# pylint: disable=invalid-name
|
|
||||||
DefaultConfig = attr.make_class('DefaultConfig', ['input', 'output'])
|
|
||||||
|
|
||||||
|
|
||||||
class AlsaAudio(CoreSysAttributes):
|
|
||||||
"""Handle Audio ALSA host data."""
|
|
||||||
|
|
||||||
def __init__(self, coresys):
|
|
||||||
"""Initialize ALSA audio system."""
|
|
||||||
self.coresys = coresys
|
|
||||||
self._data = {
|
|
||||||
ATTR_INPUT: {},
|
|
||||||
ATTR_OUTPUT: {},
|
|
||||||
}
|
|
||||||
self._cache = 0
|
|
||||||
self._default = None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def input_devices(self):
|
|
||||||
"""Return list of ALSA input devices."""
|
|
||||||
self._update_device()
|
|
||||||
return self._data[ATTR_INPUT]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def output_devices(self):
|
|
||||||
"""Return list of ALSA output devices."""
|
|
||||||
self._update_device()
|
|
||||||
return self._data[ATTR_OUTPUT]
|
|
||||||
|
|
||||||
def _update_device(self):
|
|
||||||
"""Update Internal device DB."""
|
|
||||||
current_id = hash(frozenset(self.sys_hardware.audio_devices))
|
|
||||||
|
|
||||||
# Need rebuild?
|
|
||||||
if current_id == self._cache:
|
|
||||||
return
|
|
||||||
|
|
||||||
# Clean old stuff
|
|
||||||
self._data[ATTR_INPUT].clear()
|
|
||||||
self._data[ATTR_OUTPUT].clear()
|
|
||||||
|
|
||||||
# Init database
|
|
||||||
_LOGGER.info("Update ALSA device list")
|
|
||||||
database = self._audio_database()
|
|
||||||
|
|
||||||
# Process devices
|
|
||||||
for dev_id, dev_data in self.sys_hardware.audio_devices.items():
|
|
||||||
for chan_info in dev_data[ATTR_DEVICES]:
|
|
||||||
chan_id = chan_info[CHAN_ID]
|
|
||||||
chan_type = chan_info[CHAN_TYPE]
|
|
||||||
alsa_id = f"{dev_id},{chan_id}"
|
|
||||||
dev_name = dev_data[ATTR_NAME]
|
|
||||||
|
|
||||||
# Lookup type
|
|
||||||
if chan_type.endswith('playback'):
|
|
||||||
key = ATTR_OUTPUT
|
|
||||||
elif chan_type.endswith('capture'):
|
|
||||||
key = ATTR_INPUT
|
|
||||||
else:
|
|
||||||
_LOGGER.warning("Unknown channel type: %s", chan_type)
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Use name from DB or a generic name
|
|
||||||
self._data[key][alsa_id] = database.get(
|
|
||||||
self.sys_machine, {}).get(
|
|
||||||
dev_name, {}).get(alsa_id, f"{dev_name}: {chan_id}")
|
|
||||||
|
|
||||||
self._cache = current_id
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _audio_database():
|
|
||||||
"""Read local json audio data into dict."""
|
|
||||||
json_file = Path(__file__).parent.joinpath("data/audiodb.json")
|
|
||||||
|
|
||||||
try:
|
|
||||||
# pylint: disable=no-member
|
|
||||||
with json_file.open('r') as database:
|
|
||||||
return json.loads(database.read())
|
|
||||||
except (ValueError, OSError) as err:
|
|
||||||
_LOGGER.warning("Can't read audio DB: %s", err)
|
|
||||||
|
|
||||||
return {}
|
|
||||||
|
|
||||||
@property
|
|
||||||
def default(self):
|
|
||||||
"""Generate ALSA default setting."""
|
|
||||||
# Init defaults
|
|
||||||
if self._default is None:
|
|
||||||
database = self._audio_database()
|
|
||||||
alsa_input = database.get(self.sys_machine, {}).get(ATTR_INPUT)
|
|
||||||
alsa_output = database.get(self.sys_machine, {}).get(ATTR_OUTPUT)
|
|
||||||
|
|
||||||
self._default = DefaultConfig(alsa_input, alsa_output)
|
|
||||||
|
|
||||||
# Find an existing or new default output device
|
|
||||||
if self._default.output is None and self.output_devices:
|
|
||||||
self._default.output = next(iter(self.output_devices))
|
|
||||||
_LOGGER.info("Detect output device %s", self._default.output)
|
|
||||||
|
|
||||||
# Find an existing or new default input device
|
|
||||||
if self._default.input is None and self.input_devices:
|
|
||||||
self._default.input = next(iter(self.input_devices))
|
|
||||||
_LOGGER.info("Detect input device %s", self._default.input)
|
|
||||||
|
|
||||||
return self._default
|
|
||||||
|
|
||||||
def asound(self, alsa_input=None, alsa_output=None):
|
|
||||||
"""Generate an asound data."""
|
|
||||||
alsa_input = alsa_input or self.default.input
|
|
||||||
alsa_output = alsa_output or self.default.output
|
|
||||||
|
|
||||||
# Read Template
|
|
||||||
asound_file = Path(__file__).parent.joinpath("data/asound.tmpl")
|
|
||||||
try:
|
|
||||||
# pylint: disable=no-member
|
|
||||||
with asound_file.open('r') as asound:
|
|
||||||
asound_data = asound.read()
|
|
||||||
except OSError as err:
|
|
||||||
_LOGGER.error("Can't read asound.tmpl: %s", err)
|
|
||||||
return ""
|
|
||||||
|
|
||||||
# Process Template
|
|
||||||
asound_template = Template(asound_data)
|
|
||||||
return asound_template.safe_substitute(
|
|
||||||
input=alsa_input, output=alsa_output
|
|
||||||
)
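A hedged sketch of generating an asound configuration with explicit devices; the device IDs and target path are illustrative only, and coresys is assumed to exist.

from pathlib import Path

alsa = AlsaAudio(coresys)  # coresys assumed to exist
content = alsa.asound(alsa_input="0,0", alsa_output="0,0")
Path("/tmp/asound.conf").write_text(content)  # illustrative target path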
|
|
@@ -1,17 +0,0 @@
|
|||||||
pcm.!default {
|
|
||||||
type asym
|
|
||||||
capture.pcm "mic"
|
|
||||||
playback.pcm "speaker"
|
|
||||||
}
|
|
||||||
pcm.mic {
|
|
||||||
type plug
|
|
||||||
slave {
|
|
||||||
pcm "hw:$input"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
pcm.speaker {
|
|
||||||
type plug
|
|
||||||
slave {
|
|
||||||
pcm "hw:$output"
|
|
||||||
}
|
|
||||||
}
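For illustration, with both input and output set to 0,0 the mic block of the template above would render roughly as:

pcm.mic {
    type plug
    slave {
        pcm "hw:0,0"
    }
}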
|
|
@@ -1,18 +0,0 @@
|
|||||||
{
|
|
||||||
"raspberrypi3": {
|
|
||||||
"bcm2835 - bcm2835 ALSA": {
|
|
||||||
"0,0": "Raspberry Jack",
|
|
||||||
"0,1": "Raspberry HDMI"
|
|
||||||
},
|
|
||||||
"output": "0,0",
|
|
||||||
"input": null
|
|
||||||
},
|
|
||||||
"raspberrypi2": {
|
|
||||||
"output": "0,0",
|
|
||||||
"input": null
|
|
||||||
},
|
|
||||||
"raspberrypi": {
|
|
||||||
"output": "0,0",
|
|
||||||
"input": null
|
|
||||||
}
|
|
||||||
}
|
|
@@ -1,58 +0,0 @@
|
|||||||
"""Info control for host."""
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from ..coresys import CoreSysAttributes
|
|
||||||
from ..exceptions import HassioError, HostNotSupportedError
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class InfoCenter(CoreSysAttributes):
|
|
||||||
"""Handle local system information controls."""
|
|
||||||
|
|
||||||
def __init__(self, coresys):
|
|
||||||
"""Initialize system center handling."""
|
|
||||||
self.coresys = coresys
|
|
||||||
self._data = {}
|
|
||||||
|
|
||||||
@property
|
|
||||||
def hostname(self):
|
|
||||||
"""Return local hostname."""
|
|
||||||
return self._data.get('StaticHostname') or None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def chassis(self):
|
|
||||||
"""Return local chassis type."""
|
|
||||||
return self._data.get('Chassis') or None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def deployment(self):
|
|
||||||
"""Return local deployment type."""
|
|
||||||
return self._data.get('Deployment') or None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def kernel(self):
|
|
||||||
"""Return local kernel version."""
|
|
||||||
return self._data.get('KernelRelease') or None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def operating_system(self):
|
|
||||||
"""Return local operating system."""
|
|
||||||
return self._data.get('OperatingSystemPrettyName') or None
|
|
||||||
|
|
||||||
@property
|
|
||||||
def cpe(self):
|
|
||||||
"""Return local CPE."""
|
|
||||||
return self._data.get('OperatingSystemCPEName') or None
|
|
||||||
|
|
||||||
async def update(self):
|
|
||||||
"""Update properties over dbus."""
|
|
||||||
if not self.sys_dbus.hostname.is_connected:
|
|
||||||
_LOGGER.error("No hostname D-Bus connection available")
|
|
||||||
raise HostNotSupportedError()
|
|
||||||
|
|
||||||
_LOGGER.info("Update local host information")
|
|
||||||
try:
|
|
||||||
self._data = await self.sys_dbus.hostname.get_properties()
|
|
||||||
except HassioError:
|
|
||||||
_LOGGER.warning("Can't update host system information!")
|
|
@@ -1,103 +0,0 @@
|
|||||||
"""Fetch last versions from webserver."""
|
|
||||||
from datetime import timedelta
|
|
||||||
import logging
|
|
||||||
from typing import Dict, Optional
|
|
||||||
import secrets
|
|
||||||
|
|
||||||
from .addons.addon import Addon
|
|
||||||
from .const import ATTR_SESSION, FILE_HASSIO_INGRESS
|
|
||||||
from .coresys import CoreSys, CoreSysAttributes
|
|
||||||
from .utils.json import JsonConfig
|
|
||||||
from .utils.dt import utcnow, utc_from_timestamp
|
|
||||||
from .validate import SCHEMA_INGRESS_CONFIG
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Ingress(JsonConfig, CoreSysAttributes):
|
|
||||||
"""Fetch last versions from version.json."""
|
|
||||||
|
|
||||||
def __init__(self, coresys: CoreSys):
|
|
||||||
"""Initialize updater."""
|
|
||||||
super().__init__(FILE_HASSIO_INGRESS, SCHEMA_INGRESS_CONFIG)
|
|
||||||
self.coresys: CoreSys = coresys
|
|
||||||
self.tokens: Dict[str, str] = {}
|
|
||||||
|
|
||||||
def get(self, token: str) -> Optional[Addon]:
|
|
||||||
"""Return addon they have this ingress token."""
|
|
||||||
if token not in self.tokens:
|
|
||||||
self._update_token_list()
|
|
||||||
return self.sys_addons.get(self.tokens.get(token))
|
|
||||||
|
|
||||||
@property
|
|
||||||
def sessions(self) -> Dict[str, float]:
|
|
||||||
"""Return sessions."""
|
|
||||||
return self._data[ATTR_SESSION]
|
|
||||||
|
|
||||||
async def load(self) -> None:
|
|
||||||
"""Update internal data."""
|
|
||||||
self._update_token_list()
|
|
||||||
self._cleanup_sessions()
|
|
||||||
|
|
||||||
_LOGGER.info("Load %d ingress session", len(self.sessions))
|
|
||||||
|
|
||||||
async def reload(self) -> None:
|
|
||||||
"""Reload/Validate sessions."""
|
|
||||||
self._cleanup_sessions()
|
|
||||||
|
|
||||||
async def unload(self) -> None:
|
|
||||||
"""Shutdown sessions."""
|
|
||||||
self.save_data()
|
|
||||||
|
|
||||||
def _cleanup_sessions(self) -> None:
|
|
||||||
"""Remove not used sessions."""
|
|
||||||
now = utcnow()
|
|
||||||
|
|
||||||
sessions = {}
|
|
||||||
for session, valid in self.sessions.items():
|
|
||||||
valid_dt = utc_from_timestamp(valid)
|
|
||||||
if valid_dt < now:
|
|
||||||
continue
|
|
||||||
|
|
||||||
# Is valid
|
|
||||||
sessions[session] = valid
|
|
||||||
|
|
||||||
# Write back
|
|
||||||
self.sessions.clear()
|
|
||||||
self.sessions.update(sessions)
|
|
||||||
|
|
||||||
def _update_token_list(self) -> None:
|
|
||||||
"""Regenerate token <-> Add-on map."""
|
|
||||||
self.tokens.clear()
|
|
||||||
|
|
||||||
# Read all ingress tokens and build a map
|
|
||||||
for addon in self.sys_addons.list_installed:
|
|
||||||
if not addon.with_ingress:
|
|
||||||
continue
|
|
||||||
self.tokens[addon.ingress_token] = addon.slug
|
|
||||||
|
|
||||||
def create_session(self) -> str:
|
|
||||||
"""Create new session."""
|
|
||||||
session = secrets.token_hex(64)
|
|
||||||
valid = utcnow() + timedelta(minutes=15)
|
|
||||||
|
|
||||||
self.sessions[session] = valid.timestamp()
|
|
||||||
self.save_data()
|
|
||||||
|
|
||||||
return session
|
|
||||||
|
|
||||||
def validate_session(self, session: str) -> bool:
|
|
||||||
"""Return True if session valid and make it longer valid."""
|
|
||||||
if session not in self.sessions:
|
|
||||||
return False
|
|
||||||
valid_until = utc_from_timestamp(self.sessions[session])
|
|
||||||
|
|
||||||
# Is still valid?
|
|
||||||
if valid_until < utcnow():
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Update time
|
|
||||||
valid_until = valid_until + timedelta(minutes=15)
|
|
||||||
self.sessions[session] = valid_until.timestamp()
|
|
||||||
|
|
||||||
return True
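A hedged sketch of the ingress session lifecycle: create a token, hand it to the frontend, and validate it on later requests; coresys is assumed to exist and the config file to be writable.

ingress = Ingress(coresys)
session = ingress.create_session()        # valid for 15 minutes
assert ingress.validate_session(session)  # also extends the validity window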
|
|
@@ -1 +0,0 @@
|
|||||||
"""Special object and tools for Hass.io."""
|
|
@@ -1,46 +0,0 @@
|
|||||||
"""Setup the internal DNS service for host applications."""
|
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
import shlex
|
|
||||||
|
|
||||||
import async_timeout
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
COMMAND = "socat UDP-RECVFROM:53,fork UDP-SENDTO:127.0.0.11:53"
|
|
||||||
|
|
||||||
|
|
||||||
class DNSForward:
|
|
||||||
"""Manage DNS forwarding to internal DNS."""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
"""Initialize DNS forwarding."""
|
|
||||||
self.proc = None
|
|
||||||
|
|
||||||
async def start(self):
|
|
||||||
"""Start DNS forwarding."""
|
|
||||||
try:
|
|
||||||
self.proc = await asyncio.create_subprocess_exec(
|
|
||||||
*shlex.split(COMMAND),
|
|
||||||
stdin=asyncio.subprocess.DEVNULL,
|
|
||||||
stdout=asyncio.subprocess.DEVNULL,
|
|
||||||
stderr=asyncio.subprocess.DEVNULL)
|
|
||||||
except OSError as err:
|
|
||||||
_LOGGER.error("Can't start DNS forwarding: %s", err)
|
|
||||||
else:
|
|
||||||
_LOGGER.info("Start DNS port forwarding for host add-ons")
|
|
||||||
|
|
||||||
async def stop(self):
|
|
||||||
"""Stop DNS forwarding."""
|
|
||||||
if not self.proc:
|
|
||||||
_LOGGER.warning("DNS forwarding is not running!")
|
|
||||||
return
|
|
||||||
|
|
||||||
self.proc.kill()
|
|
||||||
try:
|
|
||||||
with async_timeout.timeout(5):
|
|
||||||
await self.proc.wait()
|
|
||||||
except asyncio.TimeoutError:
|
|
||||||
_LOGGER.warning("Stop waiting for DNS shutdown")
|
|
||||||
|
|
||||||
_LOGGER.info("Stop DNS forwarding")
|
|
@@ -1,136 +0,0 @@
|
|||||||
"""Read hardware info from system."""
|
|
||||||
from datetime import datetime
|
|
||||||
import logging
|
|
||||||
from pathlib import Path
|
|
||||||
import re
|
|
||||||
|
|
||||||
import pyudev
|
|
||||||
|
|
||||||
from ..const import ATTR_NAME, ATTR_TYPE, ATTR_DEVICES, CHAN_ID, CHAN_TYPE
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
ASOUND_CARDS = Path("/proc/asound/cards")
|
|
||||||
RE_CARDS = re.compile(r"(\d+) \[(\w*) *\]: (.*\w)")
|
|
||||||
|
|
||||||
ASOUND_DEVICES = Path("/proc/asound/devices")
|
|
||||||
RE_DEVICES = re.compile(r"\[.*(\d+)- (\d+).*\]: ([\w ]*)")
|
|
||||||
|
|
||||||
PROC_STAT = Path("/proc/stat")
|
|
||||||
RE_BOOT_TIME = re.compile(r"btime (\d+)")
|
|
||||||
|
|
||||||
GPIO_DEVICES = Path("/sys/class/gpio")
|
|
||||||
SOC_DEVICES = Path("/sys/devices/platform/soc")
|
|
||||||
RE_TTY = re.compile(r"tty[A-Z]+")
|
|
||||||
|
|
||||||
|
|
||||||
class Hardware:
|
|
||||||
"""Representation of an interface to procfs, sysfs and udev."""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
"""Init hardware object."""
|
|
||||||
self.context = pyudev.Context()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def serial_devices(self):
|
|
||||||
"""Return all serial and connected devices."""
|
|
||||||
dev_list = set()
|
|
||||||
for device in self.context.list_devices(subsystem='tty'):
|
|
||||||
if 'ID_VENDOR' in device or RE_TTY.search(device.device_node):
|
|
||||||
dev_list.add(device.device_node)
|
|
||||||
|
|
||||||
return dev_list
|
|
||||||
|
|
||||||
@property
|
|
||||||
def input_devices(self):
|
|
||||||
"""Return all input devices."""
|
|
||||||
dev_list = set()
|
|
||||||
for device in self.context.list_devices(subsystem='input'):
|
|
||||||
if 'NAME' in device:
|
|
||||||
dev_list.add(device['NAME'].replace('"', ''))
|
|
||||||
|
|
||||||
return dev_list
|
|
||||||
|
|
||||||
@property
|
|
||||||
def disk_devices(self):
|
|
||||||
"""Return all disk devices."""
|
|
||||||
dev_list = set()
|
|
||||||
for device in self.context.list_devices(subsystem='block'):
|
|
||||||
if device.device_node.startswith('/dev/sd'):
|
|
||||||
dev_list.add(device.device_node)
|
|
||||||
|
|
||||||
return dev_list
|
|
||||||
|
|
||||||
@property
|
|
||||||
def support_audio(self):
|
|
||||||
"""Return True if the system have audio support."""
|
|
||||||
return bool(self.audio_devices)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def audio_devices(self):
|
|
||||||
"""Return all available audio interfaces."""
|
|
||||||
if not ASOUND_CARDS.exists():
|
|
||||||
_LOGGER.info("No audio devices found")
|
|
||||||
return {}
|
|
||||||
|
|
||||||
try:
|
|
||||||
cards = ASOUND_CARDS.read_text()
|
|
||||||
devices = ASOUND_DEVICES.read_text()
|
|
||||||
except OSError as err:
|
|
||||||
_LOGGER.error("Can't read asound data: %s", err)
|
|
||||||
return {}
|
|
||||||
|
|
||||||
audio_list = {}
|
|
||||||
|
|
||||||
# parse cards
|
|
||||||
for match in RE_CARDS.finditer(cards):
|
|
||||||
audio_list[match.group(1)] = {
|
|
||||||
ATTR_NAME: match.group(3),
|
|
||||||
ATTR_TYPE: match.group(2),
|
|
||||||
ATTR_DEVICES: [],
|
|
||||||
}
|
|
||||||
|
|
||||||
# parse devices
|
|
||||||
for match in RE_DEVICES.finditer(devices):
|
|
||||||
try:
|
|
||||||
audio_list[match.group(1)][ATTR_DEVICES].append({
|
|
||||||
CHAN_ID: match.group(2),
|
|
||||||
CHAN_TYPE: match.group(3)
|
|
||||||
})
|
|
||||||
except KeyError:
|
|
||||||
_LOGGER.warning("Wrong audio device found %s", match.group(0))
|
|
||||||
continue
|
|
||||||
|
|
||||||
return audio_list
|
|
||||||
|
|
||||||
@property
|
|
||||||
def support_gpio(self):
|
|
||||||
"""Return True if device support GPIOs."""
|
|
||||||
return SOC_DEVICES.exists() and GPIO_DEVICES.exists()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def gpio_devices(self):
|
|
||||||
"""Return list of GPIO interface on device."""
|
|
||||||
dev_list = set()
|
|
||||||
for interface in GPIO_DEVICES.glob("gpio*"):
|
|
||||||
dev_list.add(interface.name)
|
|
||||||
|
|
||||||
return dev_list
|
|
||||||
|
|
||||||
@property
|
|
||||||
def last_boot(self):
|
|
||||||
"""Return last boot time."""
|
|
||||||
try:
|
|
||||||
with PROC_STAT.open("r") as stat_file:
|
|
||||||
stats = stat_file.read()
|
|
||||||
except OSError as err:
|
|
||||||
_LOGGER.error("Can't read stat data: %s", err)
|
|
||||||
return None
|
|
||||||
|
|
||||||
# parse stat file
|
|
||||||
found = RE_BOOT_TIME.search(stats)
|
|
||||||
if not found:
|
|
||||||
_LOGGER.error("Can't found last boot time!")
|
|
||||||
return None
|
|
||||||
|
|
||||||
return datetime.utcfromtimestamp(int(found.group(1)))
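A hedged sketch of enumerating host devices with the Hardware helper; it needs pyudev and the usual /proc and /sys paths, so the output depends on the machine it runs on.

hw = Hardware()
print("serial devices:", hw.serial_devices)
print("audio devices:", hw.audio_devices)
print("last boot:", hw.last_boot)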
|
|
@@ -1,76 +0,0 @@
|
|||||||
"""Schedule for Hass.io."""
|
|
||||||
import asyncio
|
|
||||||
from datetime import date, datetime, time, timedelta
|
|
||||||
import logging
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
INTERVAL = 'interval'
|
|
||||||
REPEAT = 'repeat'
|
|
||||||
CALL = 'callback'
|
|
||||||
TASK = 'task'
|
|
||||||
|
|
||||||
|
|
||||||
class Scheduler:
|
|
||||||
"""Schedule task inside Hass.io."""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
"""Initialize task schedule."""
|
|
||||||
self.loop = asyncio.get_running_loop()
|
|
||||||
self._data = {}
|
|
||||||
self.suspend = False
|
|
||||||
|
|
||||||
def register_task(self, coro_callback, interval, repeat=True):
|
|
||||||
"""Schedule a coroutine.
|
|
||||||
|
|
||||||
The callback must be a coroutine function that takes no arguments.
|
|
||||||
"""
|
|
||||||
task_id = hash(coro_callback)
|
|
||||||
|
|
||||||
# Generate data
|
|
||||||
opts = {
|
|
||||||
CALL: coro_callback,
|
|
||||||
INTERVAL: interval,
|
|
||||||
REPEAT: repeat,
|
|
||||||
}
|
|
||||||
|
|
||||||
# Schedule task
|
|
||||||
self._data[task_id] = opts
|
|
||||||
self._schedule_task(interval, task_id)
|
|
||||||
|
|
||||||
return task_id
|
|
||||||
|
|
||||||
def _run_task(self, task_id):
|
|
||||||
"""Run a scheduled task."""
|
|
||||||
data = self._data[task_id]
|
|
||||||
|
|
||||||
if not self.suspend:
|
|
||||||
self.loop.create_task(data[CALL]())
|
|
||||||
|
|
||||||
if data[REPEAT]:
|
|
||||||
self._schedule_task(data[INTERVAL], task_id)
|
|
||||||
else:
|
|
||||||
self._data.pop(task_id)
|
|
||||||
|
|
||||||
def _schedule_task(self, interval, task_id):
|
|
||||||
"""Schedule a task on loop."""
|
|
||||||
if isinstance(interval, (int, float)):
|
|
||||||
job = self.loop.call_later(interval, self._run_task, task_id)
|
|
||||||
elif isinstance(interval, time):
|
|
||||||
today = datetime.combine(date.today(), interval)
|
|
||||||
tomorrow = datetime.combine(date.today() + timedelta(days=1),
|
|
||||||
interval)
|
|
||||||
|
|
||||||
# Check if we run it today or next day
|
|
||||||
if today > datetime.today():
|
|
||||||
calc = today
|
|
||||||
else:
|
|
||||||
calc = tomorrow
|
|
||||||
|
|
||||||
job = self.loop.call_at(calc.timestamp(), self._run_task, task_id)
|
|
||||||
else:
|
|
||||||
_LOGGER.fatal("Unknown interval %s (type: %s) for scheduler %s",
|
|
||||||
interval, type(interval), task_id)
|
|
||||||
|
|
||||||
# Store job
|
|
||||||
self._data[task_id][TASK] = job
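A hedged sketch of registering a repeating job; Scheduler must be created while an event loop is already running because it calls asyncio.get_running_loop().

async def heartbeat():
    print("still alive")

async def main():
    scheduler = Scheduler()                        # needs a running event loop
    scheduler.register_task(heartbeat, 60, repeat=True)
    await asyncio.sleep(3600)                      # keep the loop alive (illustrative)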
|
|
@@ -1,12 +0,0 @@
|
|||||||
"""Validate services schema."""
|
|
||||||
import voluptuous as vol
|
|
||||||
|
|
||||||
from ..utils.validate import schema_or
|
|
||||||
from .const import SERVICE_MQTT
|
|
||||||
from .modules.mqtt import SCHEMA_CONFIG_MQTT
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_SERVICES_CONFIG = vol.Schema(
|
|
||||||
{vol.Optional(SERVICE_MQTT, default=dict): schema_or(SCHEMA_CONFIG_MQTT)},
|
|
||||||
extra=vol.REMOVE_EXTRA,
|
|
||||||
)
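A hedged sketch of applying the schema; it assumes SERVICE_MQTT maps to the string "mqtt" and relies on REMOVE_EXTRA to drop unknown keys while the MQTT default ({}) is filled in.

config = SCHEMA_SERVICES_CONFIG({"unexpected": True})
# config == {"mqtt": {}}  (assuming SERVICE_MQTT == "mqtt")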
|
|
hassio/tasks.py
@@ -1,196 +0,0 @@
|
|||||||
"""A collection of tasks."""
|
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
|
|
||||||
from .coresys import CoreSysAttributes
|
|
||||||
from .exceptions import HomeAssistantError
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
HASS_WATCHDOG_API = "HASS_WATCHDOG_API"
|
|
||||||
|
|
||||||
RUN_UPDATE_SUPERVISOR = 29100
|
|
||||||
RUN_UPDATE_ADDONS = 57600
|
|
||||||
RUN_UPDATE_HASSOSCLI = 29100
|
|
||||||
|
|
||||||
RUN_RELOAD_ADDONS = 21600
|
|
||||||
RUN_RELOAD_SNAPSHOTS = 72000
|
|
||||||
RUN_RELOAD_HOST = 72000
|
|
||||||
RUN_RELOAD_UPDATER = 21600
|
|
||||||
RUN_RELOAD_INGRESS = 930
|
|
||||||
|
|
||||||
RUN_WATCHDOG_HOMEASSISTANT_DOCKER = 15
|
|
||||||
RUN_WATCHDOG_HOMEASSISTANT_API = 300
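For orientation, the intervals above are in seconds: the Supervisor and HassOS CLI update checks run roughly every 8 hours (29100 s), add-on updates every 16 hours (57600 s), add-on and updater reloads every 6 hours (21600 s), snapshot and host reloads every 20 hours (72000 s), ingress reloads about every 15.5 minutes (930 s), and the Home Assistant watchdogs every 15 seconds (Docker state) and 5 minutes (API).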
|
|
||||||
|
|
||||||
|
|
||||||
class Tasks(CoreSysAttributes):
|
|
||||||
"""Handle Tasks inside Hass.io."""
|
|
||||||
|
|
||||||
def __init__(self, coresys):
|
|
||||||
"""Initialize Tasks."""
|
|
||||||
self.coresys = coresys
|
|
||||||
self.jobs = set()
|
|
||||||
self._cache = {}
|
|
||||||
|
|
||||||
async def load(self):
|
|
||||||
"""Add Tasks to scheduler."""
|
|
||||||
# Update
|
|
||||||
self.jobs.add(
|
|
||||||
self.sys_scheduler.register_task(self._update_addons, RUN_UPDATE_ADDONS)
|
|
||||||
)
|
|
||||||
self.jobs.add(
|
|
||||||
self.sys_scheduler.register_task(
|
|
||||||
self._update_supervisor, RUN_UPDATE_SUPERVISOR
|
|
||||||
)
|
|
||||||
)
|
|
||||||
self.jobs.add(
|
|
||||||
self.sys_scheduler.register_task(
|
|
||||||
self._update_hassos_cli, RUN_UPDATE_HASSOSCLI
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Reload
|
|
||||||
self.jobs.add(
|
|
||||||
self.sys_scheduler.register_task(self.sys_addons.reload, RUN_RELOAD_ADDONS)
|
|
||||||
)
|
|
||||||
self.jobs.add(
|
|
||||||
self.sys_scheduler.register_task(
|
|
||||||
self.sys_updater.reload, RUN_RELOAD_UPDATER
|
|
||||||
)
|
|
||||||
)
|
|
||||||
self.jobs.add(
|
|
||||||
self.sys_scheduler.register_task(
|
|
||||||
self.sys_snapshots.reload, RUN_RELOAD_SNAPSHOTS
|
|
||||||
)
|
|
||||||
)
|
|
||||||
self.jobs.add(
|
|
||||||
self.sys_scheduler.register_task(self.sys_host.reload, RUN_RELOAD_HOST)
|
|
||||||
)
|
|
||||||
self.jobs.add(
|
|
||||||
self.sys_scheduler.register_task(
|
|
||||||
self.sys_ingress.reload, RUN_RELOAD_INGRESS
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Watchdog
|
|
||||||
self.jobs.add(
|
|
||||||
self.sys_scheduler.register_task(
|
|
||||||
self._watchdog_homeassistant_docker, RUN_WATCHDOG_HOMEASSISTANT_DOCKER
|
|
||||||
)
|
|
||||||
)
|
|
||||||
self.jobs.add(
|
|
||||||
self.sys_scheduler.register_task(
|
|
||||||
self._watchdog_homeassistant_api, RUN_WATCHDOG_HOMEASSISTANT_API
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
_LOGGER.info("All core tasks are scheduled")
|
|
||||||
|
|
||||||
async def _update_addons(self):
|
|
||||||
"""Check if an update is available for an Add-on and update it."""
|
|
||||||
tasks = []
|
|
||||||
for addon in self.sys_addons.list_addons:
|
|
||||||
if not addon.is_installed or not addon.auto_update:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if addon.version_installed == addon.last_version:
|
|
||||||
continue
|
|
||||||
|
|
||||||
if addon.test_update_schema():
|
|
||||||
tasks.append(addon.update())
|
|
||||||
else:
|
|
||||||
_LOGGER.warning(
|
|
||||||
"Add-on %s will be ignored, schema tests fails", addon.slug
|
|
||||||
)
|
|
||||||
|
|
||||||
if tasks:
|
|
||||||
_LOGGER.info("Add-on auto update process %d tasks", len(tasks))
|
|
||||||
await asyncio.wait(tasks)
|
|
||||||
|
|
||||||
async def _update_supervisor(self):
|
|
||||||
"""Check and run update of Supervisor Hass.io."""
|
|
||||||
if not self.sys_supervisor.need_update:
|
|
||||||
return
|
|
||||||
|
|
||||||
# don't perform an update on dev channel
|
|
||||||
if self.sys_dev:
|
|
||||||
_LOGGER.warning("Ignore Hass.io update on dev channel!")
|
|
||||||
return
|
|
||||||
|
|
||||||
_LOGGER.info("Found new Hass.io version")
|
|
||||||
await self.sys_supervisor.update()
|
|
||||||
|
|
||||||
async def _watchdog_homeassistant_docker(self):
|
|
||||||
"""Check running state of Docker and start if they is close."""
|
|
||||||
# if Home Assistant is active
|
|
||||||
if (
|
|
||||||
not await self.sys_homeassistant.is_fails()
|
|
||||||
or not self.sys_homeassistant.watchdog
|
|
||||||
or self.sys_homeassistant.error_state
|
|
||||||
):
|
|
||||||
return
|
|
||||||
|
|
||||||
# if Home Assistant is running
|
|
||||||
if (
|
|
||||||
self.sys_homeassistant.in_progress
|
|
||||||
or await self.sys_homeassistant.is_running()
|
|
||||||
):
|
|
||||||
return
|
|
||||||
|
|
||||||
_LOGGER.warning("Watchdog found a problem with Home Assistant Docker!")
|
|
||||||
try:
|
|
||||||
await self.sys_homeassistant.start()
|
|
||||||
except HomeAssistantError:
|
|
||||||
_LOGGER.error("Watchdog Home Assistant reanimation fails!")
|
|
||||||
|
|
||||||
async def _watchdog_homeassistant_api(self):
|
|
||||||
"""Create scheduler task for monitoring running state of API.
|
|
||||||
|
|
||||||
Try the API twice before restarting Home Assistant, in case the
|
|
||||||
system is just responding slowly.
|
|
||||||
"""
|
|
||||||
# If Home-Assistant is active
|
|
||||||
if (
|
|
||||||
not await self.sys_homeassistant.is_fails()
|
|
||||||
or not self.sys_homeassistant.watchdog
|
|
||||||
or self.sys_homeassistant.error_state
|
|
||||||
):
|
|
||||||
return
|
|
||||||
|
|
||||||
# Init cache data
|
|
||||||
retry_scan = self._cache.get(HASS_WATCHDOG_API, 0)
|
|
||||||
|
|
||||||
# If Home-Assistant API is up
|
|
||||||
if (
|
|
||||||
self.sys_homeassistant.in_progress
|
|
||||||
or await self.sys_homeassistant.check_api_state()
|
|
||||||
):
|
|
||||||
return
|
|
||||||
|
|
||||||
# Looks like we ran into a problem
|
|
||||||
retry_scan += 1
|
|
||||||
if retry_scan == 1:
|
|
||||||
self._cache[HASS_WATCHDOG_API] = retry_scan
|
|
||||||
_LOGGER.warning("Watchdog miss API response from Home Assistant")
|
|
||||||
return
|
|
||||||
|
|
||||||
_LOGGER.error("Watchdog found a problem with Home Assistant API!")
|
|
||||||
try:
|
|
||||||
await self.sys_homeassistant.restart()
|
|
||||||
except HomeAssistantError:
|
|
||||||
_LOGGER.error("Watchdog Home Assistant reanimation fails!")
|
|
||||||
finally:
|
|
||||||
self._cache[HASS_WATCHDOG_API] = 0
|
|
||||||
|
|
||||||
async def _update_hassos_cli(self):
|
|
||||||
"""Check and run update of HassOS CLI."""
|
|
||||||
if not self.sys_hassos.need_cli_update:
|
|
||||||
return
|
|
||||||
|
|
||||||
# don't perform an update on dev channel
|
|
||||||
if self.sys_dev:
|
|
||||||
_LOGGER.warning("Ignore HassOS CLI update on dev channel!")
|
|
||||||
return
|
|
||||||
|
|
||||||
_LOGGER.info("Found new HassOS CLI version")
|
|
||||||
await self.sys_hassos.update_cli()
|
|
@@ -1,115 +0,0 @@
|
|||||||
"""Fetch last versions from webserver."""
|
|
||||||
import asyncio
|
|
||||||
from contextlib import suppress
|
|
||||||
from datetime import timedelta
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import aiohttp
|
|
||||||
|
|
||||||
from .const import (
|
|
||||||
URL_HASSIO_VERSION, FILE_HASSIO_UPDATER, ATTR_HOMEASSISTANT, ATTR_HASSIO,
|
|
||||||
ATTR_CHANNEL, ATTR_HASSOS, ATTR_HASSOS_CLI)
|
|
||||||
from .coresys import CoreSysAttributes
|
|
||||||
from .utils import AsyncThrottle
|
|
||||||
from .utils.json import JsonConfig
|
|
||||||
from .validate import SCHEMA_UPDATER_CONFIG
|
|
||||||
from .exceptions import HassioUpdaterError
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Updater(JsonConfig, CoreSysAttributes):
|
|
||||||
"""Fetch last versions from version.json."""
|
|
||||||
|
|
||||||
def __init__(self, coresys):
|
|
||||||
"""Initialize updater."""
|
|
||||||
super().__init__(FILE_HASSIO_UPDATER, SCHEMA_UPDATER_CONFIG)
|
|
||||||
self.coresys = coresys
|
|
||||||
|
|
||||||
async def load(self):
|
|
||||||
"""Update internal data."""
|
|
||||||
with suppress(HassioUpdaterError):
|
|
||||||
await self.fetch_data()
|
|
||||||
|
|
||||||
async def reload(self):
|
|
||||||
"""Update internal data."""
|
|
||||||
with suppress(HassioUpdaterError):
|
|
||||||
await self.fetch_data()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def version_homeassistant(self):
|
|
||||||
"""Return last version of Home Assistant."""
|
|
||||||
return self._data.get(ATTR_HOMEASSISTANT)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def version_hassio(self):
|
|
||||||
"""Return last version of Hass.io."""
|
|
||||||
return self._data.get(ATTR_HASSIO)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def version_hassos(self):
|
|
||||||
"""Return last version of HassOS."""
|
|
||||||
return self._data.get(ATTR_HASSOS)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def version_hassos_cli(self):
|
|
||||||
"""Return last version of HassOS cli."""
|
|
||||||
return self._data.get(ATTR_HASSOS_CLI)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def channel(self):
|
|
||||||
"""Return upstream channel of Hass.io instance."""
|
|
||||||
return self._data[ATTR_CHANNEL]
|
|
||||||
|
|
||||||
@channel.setter
|
|
||||||
def channel(self, value):
|
|
||||||
"""Set upstream mode."""
|
|
||||||
self._data[ATTR_CHANNEL] = value
|
|
||||||
|
|
||||||
@AsyncThrottle(timedelta(seconds=60))
|
|
||||||
async def fetch_data(self):
|
|
||||||
"""Fetch current versions from Github.
|
|
||||||
|
|
||||||
Is a coroutine.
|
|
||||||
"""
|
|
||||||
url = URL_HASSIO_VERSION.format(channel=self.channel)
|
|
||||||
machine = self.sys_machine or 'default'
|
|
||||||
board = self.sys_hassos.board
|
|
||||||
|
|
||||||
try:
|
|
||||||
_LOGGER.info("Fetch update data from %s", url)
|
|
||||||
async with self.sys_websession.get(url, timeout=10) as request:
|
|
||||||
data = await request.json(content_type=None)
|
|
||||||
|
|
||||||
except (aiohttp.ClientError, asyncio.TimeoutError) as err:
|
|
||||||
_LOGGER.warning("Can't fetch versions from %s: %s", url, err)
|
|
||||||
raise HassioUpdaterError() from None
|
|
||||||
|
|
||||||
except json.JSONDecodeError as err:
|
|
||||||
_LOGGER.warning("Can't parse versions from %s: %s", url, err)
|
|
||||||
raise HassioUpdaterError() from None
|
|
||||||
|
|
||||||
# data valid?
|
|
||||||
if not data or data.get(ATTR_CHANNEL) != self.channel:
|
|
||||||
_LOGGER.warning("Invalid data from %s", url)
|
|
||||||
raise HassioUpdaterError() from None
|
|
||||||
|
|
||||||
try:
|
|
||||||
# update supervisor version
|
|
||||||
self._data[ATTR_HASSIO] = data['supervisor']
|
|
||||||
|
|
||||||
# update Home Assistant version
|
|
||||||
self._data[ATTR_HOMEASSISTANT] = data['homeassistant'][machine]
|
|
||||||
|
|
||||||
# update hassos version
|
|
||||||
if self.sys_hassos.available and board:
|
|
||||||
self._data[ATTR_HASSOS] = data['hassos'][board]
|
|
||||||
self._data[ATTR_HASSOS_CLI] = data['hassos-cli']
|
|
||||||
|
|
||||||
except KeyError as err:
|
|
||||||
_LOGGER.warning("Can't process version data: %s", err)
|
|
||||||
raise HassioUpdaterError() from None
|
|
||||||
|
|
||||||
else:
|
|
||||||
self.save_data()
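A hedged sketch of the version payload shape fetch_data expects, based on the keys accessed above; the channel key name and all values are illustrative.

example = {
    "channel": "stable",             # compared against self.channel
    "supervisor": "150",
    "homeassistant": {"default": "0.92.2", "qemux86-64": "0.92.2"},
    "hassos": {"ova": "2.11"},
    "hassos-cli": "7",
}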
|
|
@@ -1,55 +0,0 @@
|
|||||||
"""Tools file for Hass.io."""
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
RE_STRING = re.compile(r"\x1b(\[.*?[@-~]|\].*?(\x07|\x1b\\))")
|
|
||||||
|
|
||||||
|
|
||||||
def convert_to_ascii(raw) -> str:
|
|
||||||
"""Convert binary to ascii and remove colors."""
|
|
||||||
return RE_STRING.sub("", raw.decode())
|
|
||||||
|
|
||||||
|
|
||||||
def process_lock(method):
|
|
||||||
"""Wrap function with only run once."""
|
|
||||||
|
|
||||||
async def wrap_api(api, *args, **kwargs):
|
|
||||||
"""Return api wrapper."""
|
|
||||||
if api.lock.locked():
|
|
||||||
_LOGGER.error(
|
|
||||||
"Can't execute %s while a task is in progress", method.__name__
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
|
|
||||||
async with api.lock:
|
|
||||||
return await method(api, *args, **kwargs)
|
|
||||||
|
|
||||||
return wrap_api
|
|
||||||
|
|
||||||
|
|
||||||
class AsyncThrottle:
|
|
||||||
"""
|
|
||||||
Decorator that prevents a function from being called more than once every
|
|
||||||
time period.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, delta):
|
|
||||||
"""Initialize async throttle."""
|
|
||||||
self.throttle_period = delta
|
|
||||||
self.time_of_last_call = datetime.min
|
|
||||||
|
|
||||||
def __call__(self, method):
|
|
||||||
"""Throttle function"""
|
|
||||||
|
|
||||||
async def wrapper(*args, **kwargs):
|
|
||||||
"""Throttle function wrapper"""
|
|
||||||
now = datetime.now()
|
|
||||||
time_since_last_call = now - self.time_of_last_call
|
|
||||||
|
|
||||||
if time_since_last_call > self.throttle_period:
|
|
||||||
self.time_of_last_call = now
|
|
||||||
return await method(*args, **kwargs)
|
|
||||||
|
|
||||||
return wrapper
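A hedged sketch combining both helpers above: process_lock serializes calls on an object that exposes an asyncio lock, and AsyncThrottle silently drops calls that arrive within the configured period.

import asyncio
from datetime import timedelta

class Demo:
    def __init__(self):
        self.lock = asyncio.Lock()   # attribute process_lock expects

    @process_lock
    async def long_job(self):
        await asyncio.sleep(1)

    @AsyncThrottle(timedelta(seconds=60))
    async def poll(self):
        return "polled"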
|
|
@@ -1,285 +0,0 @@
|
|||||||
"""DBus implementation with glib."""
|
|
||||||
import asyncio
|
|
||||||
import logging
|
|
||||||
import json
|
|
||||||
import shlex
|
|
||||||
import re
|
|
||||||
from signal import SIGINT
|
|
||||||
import xml.etree.ElementTree as ET
|
|
||||||
|
|
||||||
from ..exceptions import DBusFatalError, DBusParseError
|
|
||||||
|
|
||||||
_LOGGER = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Use to convert GVariant into json
|
|
||||||
RE_GVARIANT_TYPE = re.compile(
|
|
||||||
r"(?:boolean|byte|int16|uint16|int32|uint32|handle|int64|uint64|double|"
|
|
||||||
r"string|objectpath|signature) ")
|
|
||||||
RE_GVARIANT_VARIANT = re.compile(
|
|
||||||
r"(?<=(?: |{|\[))<((?:'|\").*?(?:'|\")|\d+(?:\.\d+)?)>(?=(?:|]|}|,))")
|
|
||||||
RE_GVARIANT_STRING = re.compile(r"(?<=(?: |{|\[|\())'(.*?)'(?=(?:|]|}|,|\)))")
|
|
||||||
RE_GVARIANT_TUPLE_O = re.compile(r"\"[^\"]*?\"|(\()")
|
|
||||||
RE_GVARIANT_TUPLE_C = re.compile(r"\"[^\"]*?\"|(,?\))")
|
|
||||||
|
|
||||||
RE_MONITOR_OUTPUT = re.compile(r".+?: (?P<signal>[^ ].+) (?P<data>.*)")
|
|
||||||
|
|
||||||
# Commands for dbus
|
|
||||||
INTROSPECT = ("gdbus introspect --system --dest {bus} "
|
|
||||||
"--object-path {object} --xml")
|
|
||||||
CALL = ("gdbus call --system --dest {bus} --object-path {object} "
|
|
||||||
"--method {method} {args}")
|
|
||||||
MONITOR = ("gdbus monitor --system --dest {bus}")
|
|
||||||
|
|
||||||
DBUS_METHOD_GETALL = 'org.freedesktop.DBus.Properties.GetAll'
|
|
||||||
|
|
||||||
|
|
||||||
class DBus:
|
|
||||||
"""DBus handler."""
|
|
||||||
|
|
||||||
def __init__(self, bus_name, object_path):
|
|
||||||
"""Initialize dbus object."""
|
|
||||||
self.bus_name = bus_name
|
|
||||||
self.object_path = object_path
|
|
||||||
self.methods = set()
|
|
||||||
self.signals = set()
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
async def connect(bus_name, object_path):
|
|
||||||
"""Read object data."""
|
|
||||||
self = DBus(bus_name, object_path)
|
|
||||||
await self._init_proxy() # pylint: disable=protected-access
|
|
||||||
|
|
||||||
_LOGGER.info("Connect to dbus: %s - %s", bus_name, object_path)
|
|
||||||
return self
|
|
||||||
|
|
||||||
async def _init_proxy(self):
|
|
||||||
"""Read interface data."""
|
|
||||||
command = shlex.split(INTROSPECT.format(
|
|
||||||
bus=self.bus_name,
|
|
||||||
object=self.object_path
|
|
||||||
))
|
|
||||||
|
|
||||||
# Ask data
|
|
||||||
_LOGGER.info("Introspect %s on %s", self.bus_name, self.object_path)
|
|
||||||
data = await self._send(command)
|
|
||||||
|
|
||||||
# Parse XML
|
|
||||||
try:
|
|
||||||
xml = ET.fromstring(data)
|
|
||||||
except ET.ParseError as err:
|
|
||||||
_LOGGER.error("Can't parse introspect data: %s", err)
|
|
||||||
raise DBusParseError() from None
|
|
||||||
|
|
||||||
# Read available methods
|
|
||||||
_LOGGER.debug("data: %s", data)
|
|
||||||
for interface in xml.findall("./interface"):
|
|
||||||
interface_name = interface.get('name')
|
|
||||||
|
|
||||||
# Methods
|
|
||||||
for method in interface.findall("./method"):
|
|
||||||
method_name = method.get('name')
|
|
||||||
self.methods.add(f"{interface_name}.{method_name}")
|
|
||||||
|
|
||||||
# Signals
|
|
||||||
for signal in interface.findall("./signal"):
|
|
||||||
signal_name = signal.get('name')
|
|
||||||
self.signals.add(f"{interface_name}.{signal_name}")
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def parse_gvariant(raw):
|
|
||||||
"""Parse GVariant input to python."""
|
|
||||||
raw = RE_GVARIANT_TYPE.sub("", raw)
|
|
||||||
raw = RE_GVARIANT_VARIANT.sub(r"\1", raw)
|
|
||||||
raw = RE_GVARIANT_STRING.sub(r'"\1"', raw)
|
|
||||||
raw = RE_GVARIANT_TUPLE_O.sub(
|
|
||||||
lambda x: x.group(0) if not x.group(1) else "[", raw)
|
|
||||||
raw = RE_GVARIANT_TUPLE_C.sub(
|
|
||||||
lambda x: x.group(0) if not x.group(1) else "]", raw)
|
|
||||||
|
|
||||||
# No data
|
|
||||||
if raw.startswith("[]"):
|
|
||||||
return []
|
|
||||||
|
|
||||||
try:
|
|
||||||
return json.loads(raw)
|
|
||||||
except json.JSONDecodeError as err:
|
|
||||||
_LOGGER.error("Can't parse '%s': %s", raw, err)
|
|
||||||
raise DBusParseError() from None
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def gvariant_args(args):
|
|
||||||
"""Convert args into gvariant."""
|
|
||||||
gvariant = ""
|
|
||||||
for arg in args:
|
|
||||||
if isinstance(arg, bool):
|
|
||||||
gvariant += " {}".format(str(arg).lower())
|
|
||||||
elif isinstance(arg, (int, float)):
|
|
||||||
gvariant += f" {arg}"
|
|
||||||
elif isinstance(arg, str):
|
|
||||||
gvariant += f" \"{arg}\""
|
|
||||||
else:
|
|
||||||
gvariant += " {}".format(str(arg))
|
|
||||||
|
|
||||||
return gvariant.lstrip()
|
|
||||||
|
|
||||||
async def call_dbus(self, method, *args):
|
|
||||||
"""Call a dbus method."""
|
|
||||||
command = shlex.split(CALL.format(
|
|
||||||
bus=self.bus_name,
|
|
||||||
object=self.object_path,
|
|
||||||
method=method,
|
|
||||||
args=self.gvariant_args(args)
|
|
||||||
))
|
|
||||||
|
|
||||||
# Run command
|
|
||||||
_LOGGER.info("Call %s on %s", method, self.object_path)
|
|
||||||
data = await self._send(command)
|
|
||||||
|
|
||||||
# Parse and return data
|
|
||||||
return self.parse_gvariant(data)
|
|
||||||
|
|
||||||
async def get_properties(self, interface):
|
|
||||||
"""Read all properties from interface."""
|
|
||||||
try:
|
|
||||||
return (await self.call_dbus(DBUS_METHOD_GETALL, interface))[0]
|
|
||||||
except IndexError:
|
|
||||||
_LOGGER.error("No attributes returned for %s", interface)
|
|
||||||
raise DBusFatalError from None
|
|
||||||
|
|
||||||
async def _send(self, command):
|
|
||||||
"""Send command over dbus."""
|
|
||||||
# Run command
|
|
||||||
_LOGGER.debug("Send dbus command: %s", command)
|
|
||||||
try:
|
|
||||||
proc = await asyncio.create_subprocess_exec(
|
|
||||||
*command,
|
|
||||||
stdin=asyncio.subprocess.DEVNULL,
|
|
||||||
stdout=asyncio.subprocess.PIPE,
|
|
||||||
stderr=asyncio.subprocess.PIPE
|
|
||||||
)
|
|
||||||
|
|
||||||
data, error = await proc.communicate()
|
|
||||||
except OSError as err:
|
|
||||||
_LOGGER.error("DBus fatal error: %s", err)
|
|
||||||
raise DBusFatalError() from None
|
|
||||||
|
|
||||||
# Success?
|
|
||||||
if proc.returncode != 0:
|
|
||||||
_LOGGER.error("DBus return error: %s", error)
|
|
||||||
raise DBusFatalError()
|
|
||||||
|
|
||||||
# End
|
|
||||||
return data.decode()
|
|
||||||
|
|
||||||
def attach_signals(self, filters=None):
|
|
||||||
"""Generate a signals wrapper."""
|
|
||||||
return DBusSignalWrapper(self, filters)
|
|
||||||
|
|
||||||
async def wait_signal(self, signal):
|
|
||||||
"""Wait for single event."""
|
|
||||||
monitor = DBusSignalWrapper(self, [signal])
|
|
||||||
async with monitor as signals:
|
|
||||||
async for signal in signals:
|
|
||||||
return signal
|
|
||||||
|
|
||||||
def __getattr__(self, name):
|
|
||||||
"""Mapping to dbus method."""
|
|
||||||
return getattr(DBusCallWrapper(self, self.bus_name), name)
|
|
||||||
|
|
||||||
|
|
||||||
class DBusCallWrapper:
|
|
||||||
"""Wrapper a DBus interface for a call."""
|
|
||||||
|
|
||||||
def __init__(self, dbus, interface):
|
|
||||||
"""Initialize wrapper."""
|
|
||||||
self.dbus = dbus
|
|
||||||
self.interface = interface
|
|
||||||
|
|
||||||
def __call__(self):
|
|
||||||
"""Should never be called."""
|
|
||||||
_LOGGER.error("DBus method %s not exists!", self.interface)
|
|
||||||
raise DBusFatalError()
|
|
||||||
|
|
||||||
def __getattr__(self, name):
|
|
||||||
"""Mapping to dbus method."""
|
|
||||||
interface = f"{self.interface}.{name}"
|
|
||||||
|
|
||||||
if interface not in self.dbus.methods:
|
|
||||||
return DBusCallWrapper(self.dbus, interface)
|
|
||||||
|
|
||||||
def _method_wrapper(*args):
|
|
||||||
"""Wrap method.
|
|
||||||
|
|
||||||
Return a coroutine
|
|
||||||
"""
|
|
||||||
return self.dbus.call_dbus(interface, *args)
|
|
||||||
|
|
||||||
return _method_wrapper
|
|
||||||
|
|
||||||
|
|
||||||
class DBusSignalWrapper:
|
|
||||||
"""Process Signals."""
|
|
||||||
|
|
||||||
def __init__(self, dbus, signals=None):
|
|
||||||
"""Initialize dbus signal wrapper."""
|
|
||||||
self.dbus = dbus
|
|
||||||
self._signals = signals
|
|
||||||
self._proc = None
|
|
||||||
|
|
||||||
async def __aenter__(self):
|
|
||||||
"""Start monitor events."""
|
|
||||||
_LOGGER.info("Start dbus monitor on %s", self.dbus.bus_name)
|
|
||||||
command = shlex.split(MONITOR.format(
|
|
||||||
bus=self.dbus.bus_name
|
|
||||||
))
|
|
||||||
self._proc = await asyncio.create_subprocess_exec(
|
|
||||||
*command,
|
|
||||||
stdin=asyncio.subprocess.DEVNULL,
|
|
||||||
stdout=asyncio.subprocess.PIPE,
|
|
||||||
stderr=asyncio.subprocess.PIPE
|
|
||||||
)
|
|
||||||
|
|
||||||
return self
|
|
||||||
|
|
||||||
async def __aexit__(self, exception_type, exception_value, traceback):
|
|
||||||
"""Stop monitor events."""
|
|
||||||
_LOGGER.info("Stop dbus monitor on %s", self.dbus.bus_name)
|
|
||||||
self._proc.send_signal(SIGINT)
|
|
||||||
await self._proc.communicate()
|
|
||||||
|
|
||||||
def __aiter__(self):
|
|
||||||
"""Start Iteratation."""
|
|
||||||
return self
|
|
||||||
|
|
||||||
async def __anext__(self):
|
|
||||||
"""Get next data."""
|
|
||||||
if not self._proc:
|
|
||||||
raise StopAsyncIteration()
|
|
||||||
|
|
||||||
# Read signals
|
|
||||||
while True:
|
|
||||||
try:
|
|
||||||
data = await self._proc.stdout.readline()
|
|
||||||
except asyncio.TimeoutError:
|
|
||||||
raise StopAsyncIteration() from None
|
|
||||||
|
|
||||||
# Program close
|
|
||||||
if not data:
|
|
||||||
raise StopAsyncIteration()
|
|
||||||
|
|
||||||
# Extract metadata
|
|
||||||
match = RE_MONITOR_OUTPUT.match(data.decode())
|
|
||||||
if not match:
|
|
||||||
continue
|
|
||||||
signal = match.group('signal')
|
|
||||||
data = match.group('data')
|
|
||||||
|
|
||||||
# Filter signals?
|
|
||||||
if self._signals and signal not in self._signals:
|
|
||||||
_LOGGER.debug("Skip event %s - %s", signal, data)
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
return self.dbus.parse_gvariant(data)
|
|
||||||
except DBusParseError:
|
|
||||||
raise StopAsyncIteration() from None
|
|
@@ -1,148 +0,0 @@
|
|||||||
"""Validate functions."""
|
|
||||||
import re
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
import voluptuous as vol
|
|
||||||
|
|
||||||
from .const import (
|
|
||||||
ATTR_ACCESS_TOKEN,
|
|
||||||
ATTR_ADDONS_CUSTOM_LIST,
|
|
||||||
ATTR_BOOT,
|
|
||||||
ATTR_CHANNEL,
|
|
||||||
ATTR_HASSIO,
|
|
||||||
ATTR_HASSOS,
|
|
||||||
ATTR_HASSOS_CLI,
|
|
||||||
ATTR_HOMEASSISTANT,
|
|
||||||
ATTR_IMAGE,
|
|
||||||
ATTR_LAST_BOOT,
|
|
||||||
ATTR_LAST_VERSION,
|
|
||||||
ATTR_PASSWORD,
|
|
||||||
ATTR_PORT,
|
|
||||||
ATTR_REFRESH_TOKEN,
|
|
||||||
ATTR_SESSION,
|
|
||||||
ATTR_SSL,
|
|
||||||
ATTR_TIMEZONE,
|
|
||||||
ATTR_UUID,
|
|
||||||
ATTR_WAIT_BOOT,
|
|
||||||
ATTR_WATCHDOG,
|
|
||||||
CHANNEL_BETA,
|
|
||||||
CHANNEL_DEV,
|
|
||||||
CHANNEL_STABLE,
|
|
||||||
)
|
|
||||||
from .utils.validate import validate_timezone
|
|
||||||
|
|
||||||
RE_REPOSITORY = re.compile(r"^(?P<url>[^#]+)(?:#(?P<branch>[\w\-]+))?$")
|
|
||||||
|
|
||||||
NETWORK_PORT = vol.All(vol.Coerce(int), vol.Range(min=1, max=65535))
|
|
||||||
WAIT_BOOT = vol.All(vol.Coerce(int), vol.Range(min=1, max=60))
|
|
||||||
DOCKER_IMAGE = vol.Match(r"^[\w{}]+/[\-\w{}]+$")
|
|
||||||
ALSA_DEVICE = vol.Maybe(vol.Match(r"\d+,\d+"))
|
|
||||||
CHANNELS = vol.In([CHANNEL_STABLE, CHANNEL_BETA, CHANNEL_DEV])
|
|
||||||
UUID_MATCH = vol.Match(r"^[0-9a-f]{32}$")
|
|
||||||
SHA256 = vol.Match(r"^[0-9a-f]{64}$")
|
|
||||||
TOKEN = vol.Match(r"^[0-9a-f]{32,256}$")
|
|
||||||
|
|
||||||
|
|
||||||
def validate_repository(repository):
|
|
||||||
"""Validate a valid repository."""
|
|
||||||
data = RE_REPOSITORY.match(repository)
|
|
||||||
if not data:
|
|
||||||
raise vol.Invalid("No valid repository format!")
|
|
||||||
|
|
||||||
# Validate URL
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
vol.Url()(data.group("url"))
|
|
||||||
|
|
||||||
return repository
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
REPOSITORIES = vol.All([validate_repository], vol.Unique())
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=inconsistent-return-statements
|
|
||||||
def convert_to_docker_ports(data):
|
|
||||||
"""Convert data into Docker port list."""
|
|
||||||
# dynamic ports
|
|
||||||
if data is None:
|
|
||||||
return None
|
|
||||||
|
|
||||||
# single port
|
|
||||||
if isinstance(data, int):
|
|
||||||
return NETWORK_PORT(data)
|
|
||||||
|
|
||||||
# port list
|
|
||||||
if isinstance(data, list) and len(data) > 2:
|
|
||||||
return vol.Schema([NETWORK_PORT])(data)
|
|
||||||
|
|
||||||
# ip port mapping
|
|
||||||
if isinstance(data, list) and len(data) == 2:
|
|
||||||
return (vol.Coerce(str)(data[0]), NETWORK_PORT(data[1]))
|
|
||||||
|
|
||||||
raise vol.Invalid("Can't validate Docker host settings")
|
|
||||||
|
|
||||||
|
|
||||||
DOCKER_PORTS = vol.Schema(
|
|
||||||
{
|
|
||||||
vol.All(
|
|
||||||
vol.Coerce(str), vol.Match(r"^\d+(?:/tcp|/udp)?$")
|
|
||||||
): convert_to_docker_ports
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_HASS_CONFIG = vol.Schema(
|
|
||||||
{
|
|
||||||
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
|
|
||||||
vol.Optional(ATTR_ACCESS_TOKEN): TOKEN,
|
|
||||||
vol.Optional(ATTR_BOOT, default=True): vol.Boolean(),
|
|
||||||
vol.Inclusive(ATTR_IMAGE, "custom_hass"): DOCKER_IMAGE,
|
|
||||||
vol.Inclusive(ATTR_LAST_VERSION, "custom_hass"): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_PORT, default=8123): NETWORK_PORT,
|
|
||||||
vol.Optional(ATTR_PASSWORD): vol.Maybe(vol.Coerce(str)),
|
|
||||||
vol.Optional(ATTR_REFRESH_TOKEN): vol.Maybe(vol.Coerce(str)),
|
|
||||||
vol.Optional(ATTR_SSL, default=False): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_WATCHDOG, default=True): vol.Boolean(),
|
|
||||||
vol.Optional(ATTR_WAIT_BOOT, default=600): vol.All(
|
|
||||||
vol.Coerce(int), vol.Range(min=60)
|
|
||||||
),
|
|
||||||
},
|
|
||||||
extra=vol.REMOVE_EXTRA,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_UPDATER_CONFIG = vol.Schema(
|
|
||||||
{
|
|
||||||
vol.Optional(ATTR_CHANNEL, default=CHANNEL_STABLE): CHANNELS,
|
|
||||||
vol.Optional(ATTR_HOMEASSISTANT): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_HASSIO): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_HASSOS): vol.Coerce(str),
|
|
||||||
vol.Optional(ATTR_HASSOS_CLI): vol.Coerce(str),
|
|
||||||
},
|
|
||||||
extra=vol.REMOVE_EXTRA,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
# pylint: disable=no-value-for-parameter
|
|
||||||
SCHEMA_HASSIO_CONFIG = vol.Schema(
|
|
||||||
{
|
|
||||||
vol.Optional(ATTR_TIMEZONE, default="UTC"): validate_timezone,
|
|
||||||
vol.Optional(ATTR_LAST_BOOT): vol.Coerce(str),
|
|
||||||
vol.Optional(
|
|
||||||
ATTR_ADDONS_CUSTOM_LIST,
|
|
||||||
default=["https://github.com/hassio-addons/repository"],
|
|
||||||
): REPOSITORIES,
|
|
||||||
vol.Optional(ATTR_WAIT_BOOT, default=5): WAIT_BOOT,
|
|
||||||
},
|
|
||||||
extra=vol.REMOVE_EXTRA,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_AUTH_CONFIG = vol.Schema({SHA256: SHA256})
|
|
||||||
|
|
||||||
|
|
||||||
SCHEMA_INGRESS_CONFIG = vol.Schema(
|
|
||||||
{vol.Required(ATTR_SESSION, default=dict): vol.Schema({TOKEN: vol.Coerce(float)})},
|
|
||||||
extra=vol.REMOVE_EXTRA,
|
|
||||||
)
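A hedged sketch exercising two of the validators above: DOCKER_PORTS accepts dynamic ports (None), a single host port, or an [ip, port] pair, and SCHEMA_HASSIO_CONFIG fills in its defaults.

ports = DOCKER_PORTS({"8080/tcp": 8080, "53/udp": ["172.30.32.1", 53]})
config = SCHEMA_HASSIO_CONFIG({})   # timezone, repositories and wait_boot get defaults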
|
|
Submodule home-assistant-polymer updated: cadcd845cc...0c7c536f73
misc/hassio.png (binary, 37 KiB): changed, not shown.