Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-18 21:49:20 +00:00)

Compare commits (711 commits)
```diff
@@ -1,4 +1,4 @@
-FROM python:3.7
+FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.8

 WORKDIR /workspaces

@@ -13,7 +13,7 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     && apt-get update && apt-get install -y --no-install-recommends \
     nodejs \
     yarn \
-    && curl -o - https://raw.githubusercontent.com/creationix/nvm/v0.34.0/install.sh | bash \
+    && curl -o - https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash \
     && rm -rf /var/lib/apt/lists/*

 ENV NVM_DIR /root/.nvm

@@ -33,11 +33,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
     containerd.io \
     && rm -rf /var/lib/apt/lists/*

-# Install Python dependencies from requirements.txt if it exists
-COPY requirements.txt requirements_tests.txt /workspaces/
-RUN pip install -r requirements.txt \
-    && pip3 install -r requirements_tests.txt \
-    && pip install black tox
+# Install tools
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    jq \
+    dbus \
+    network-manager \
+    libpulse0 \
+    && rm -rf /var/lib/apt/lists/*
+
+# Set the default shell to bash instead of sh
+ENV SHELL /bin/bash
+
+# Install Python dependencies from requirements.txt if it exists
+COPY requirements.txt requirements_tests.txt ./
+RUN pip3 install -U setuptools pip \
+    && pip3 install -r requirements.txt -r requirements_tests.txt \
+    && pip3 install tox \
+    && rm -f requirements.txt requirements_tests.txt
```
```diff
@@ -1,29 +1,32 @@
 // See https://aka.ms/vscode-remote/devcontainer.json for format details.
 {
-  "name": "Hass.io dev",
-  "context": "..",
-  "dockerFile": "Dockerfile",
-  "appPort": "9123:8123",
-  "runArgs": [
-    "-e",
-    "GIT_EDITOR='code --wait'",
-    "--privileged"
-  ],
-  "extensions": [
-    "ms-python.python"
-  ],
-  "settings": {
-    "python.pythonPath": "/usr/local/bin/python",
-    "python.linting.pylintEnabled": true,
-    "python.linting.enabled": true,
-    "python.formatting.provider": "black",
-    "python.formatting.blackArgs": [
-      "--target-version",
-      "py37"
-    ],
-    "editor.formatOnPaste": false,
-    "editor.formatOnSave": true,
-    "editor.formatOnType": true,
-    "files.trimTrailingWhitespace": true
-  }
-}
+  "name": "Supervisor dev",
+  "context": "..",
+  "dockerFile": "Dockerfile",
+  "appPort": "9123:8123",
+  "postCreateCommand": "pre-commit install",
+  "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
+  "extensions": [
+    "ms-python.python",
+    "ms-python.vscode-pylance",
+    "visualstudioexptteam.vscodeintellicode",
+    "esbenp.prettier-vscode"
+  ],
+  "settings": {
+    "terminal.integrated.shell.linux": "/bin/bash",
+    "editor.formatOnPaste": false,
+    "editor.formatOnSave": true,
+    "editor.formatOnType": true,
+    "files.trimTrailingWhitespace": true,
+    "python.pythonPath": "/usr/local/bin/python3",
+    "python.linting.pylintEnabled": true,
+    "python.linting.enabled": true,
+    "python.formatting.provider": "black",
+    "python.formatting.blackArgs": ["--target-version", "py38"],
+    "python.formatting.blackPath": "/usr/local/bin/black",
+    "python.linting.banditPath": "/usr/local/bin/bandit",
+    "python.linting.flake8Path": "/usr/local/bin/flake8",
+    "python.linting.mypyPath": "/usr/local/bin/mypy",
+    "python.linting.pylintPath": "/usr/local/bin/pylint",
+    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
+  }
+}
```
```diff
@@ -14,10 +14,10 @@
 # virtualenv
 venv/

-# HA
-home-assistant-polymer/*
-misc/*
-script/*
+# Data
+home-assistant-polymer/
+script/
+tests/
+
+# Test ENV
+data/
```
.github/ISSUE_TEMPLATE.md (13 lines changed, vendored)

```diff
@@ -1,15 +1,15 @@
 <!-- READ THIS FIRST:
 - If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
-- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
-- Do not report issues for components here, plaese refer to https://github.com/home-assistant/home-assistant/issues
+- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/core/releases
+- Do not report issues for integrations here, please refer to https://github.com/home-assistant/core/issues
 - This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
 - Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
-- If you have a problem with a Add-on, make a issue on there repository.
+- If you have a problem with an add-on, make an issue in its repository.
 -->

 **Home Assistant release with the issue:**
 <!--
-- Frontend -> Developer tools -> Info
+- Frontend -> Configuration -> Info
 - Or use this command: hass --version
 -->

@@ -20,10 +20,9 @@ Please provide details about your environment.

 **Supervisor logs:**
 <!--
-- Frontend -> Hass.io -> System
-- Or use this command: hassio su logs
+- Frontend -> Supervisor -> System
+- Or use this command: ha supervisor logs
 -->

-
 **Description of problem:**
```
.github/PULL_REQUEST_TEMPLATE.md (new file, 68 lines, vendored)

```markdown
<!--
  You are amazing! Thanks for contributing to our project!
  Please, DO NOT DELETE ANY TEXT from this template! (unless instructed).
-->

## Proposed change

<!--
  Describe the big picture of your changes here to communicate to the
  maintainers why we should accept this pull request. If it fixes a bug
  or resolves a feature request, be sure to link to that issue in the
  additional information section.
-->

## Type of change

<!--
  What type of change does your PR introduce to Home Assistant?
  NOTE: Please, check only 1! box!
  If your PR requires multiple boxes to be checked, you'll most likely need to
  split it into multiple PRs. This makes things easier and faster to code review.
-->

- [ ] Dependency upgrade
- [ ] Bugfix (non-breaking change which fixes an issue)
- [ ] New feature (which adds functionality to the supervisor)
- [ ] Breaking change (fix/feature causing existing functionality to break)
- [ ] Code quality improvements to existing code or addition of tests

## Additional information

<!--
  Details are important, and help maintainers processing your PR.
  Please be sure to fill out additional details, if applicable.
-->

- This PR fixes or closes issue: fixes #
- This PR is related to issue:
- Link to documentation pull request:

## Checklist

<!--
  Put an `x` in the boxes that apply. You can also fill these out after
  creating the PR. If you're unsure about any of them, don't hesitate to ask.
  We're here to help! This is simply a reminder of what we are going to look
  for before merging your code.
-->

- [ ] The code change is tested and works locally.
- [ ] Local tests pass. **Your PR cannot be merged unless tests pass**
- [ ] There is no commented out code in this PR.
- [ ] I have followed the [development checklist][dev-checklist]
- [ ] The code has been formatted using Black (`black --fast supervisor tests`)
- [ ] Tests have been added to verify that the new code works.

If API endpoints of add-on configuration are added/changed:

- [ ] Documentation added/updated for [developers.home-assistant.io][docs-repository]

<!--
  Thank you for contributing <3

  Below, some useful links you could explore:
-->

[dev-checklist]: https://developers.home-assistant.io/docs/en/development_checklist.html
[docs-repository]: https://github.com/home-assistant/developers.home-assistant
```
.github/dependabot.yml (new file, 14 lines, vendored)

```yaml
version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
```
.github/lock.yml (new file, 27 lines, vendored)

```yaml
# Configuration for Lock Threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 1

# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: 2020-01-01

# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
exemptLabels: []

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false

# Limit to only `issues` or `pulls`
only: pulls

# Optionally, specify configuration settings just for `issues` or `pulls`
issues:
  daysUntilLock: 30
```
.github/stale.yml (3 lines changed, vendored)

```diff
@@ -6,8 +6,9 @@ daysUntilClose: 7
 exemptLabels:
   - pinned
   - security
+  - rfc
 # Label to use when marking an issue as stale
-staleLabel: wontfix
+staleLabel: stale
 # Comment to post when marking an issue as stale. Set to `false` to disable
 markComment: >
   This issue has been automatically marked as stale because it has not had
```
.github/workflows/ci.yaml (new file, 432 lines, vendored)

```yaml
name: CI

# yamllint disable-line rule:truthy
on:
  push:
    branches:
      - dev
      - master
  pull_request: ~

env:
  DEFAULT_PYTHON: 3.8
  PRE_COMMIT_HOME: ~/.cache/pre-commit

jobs:
  # Separate job to pre-populate the base dependency cache
  # This prevent upcoming jobs to do the same individually
  prepare:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.8]
    name: Prepare Python ${{ matrix.python-version }} dependencies
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v2.1.4
        with:
          python-version: ${{ matrix.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
          restore-keys: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-
      - name: Create Python virtual environment
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          python -m venv venv
          . venv/bin/activate
          pip install -U pip setuptools
          pip install -r requirements.txt -r requirements_tests.txt
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pre-commit-
      - name: Install pre-commit dependencies
        if: steps.cache-precommit.outputs.cache-hit != 'true'
        run: |
          . venv/bin/activate
          pre-commit install-hooks

  lint-black:
    name: Check black
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.4
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run black
        run: |
          . venv/bin/activate
          black --target-version py38 --check supervisor tests setup.py

  lint-dockerfile:
    name: Check Dockerfile
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"
      - name: Check Dockerfile
        uses: docker://hadolint/hadolint:v1.18.0
        with:
          args: hadolint Dockerfile

  lint-executable-shebangs:
    name: Check executables
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.4
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register check executables problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
      - name: Run executables check
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files

  lint-flake8:
    name: Check flake8
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.4
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register flake8 problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/flake8.json"
      - name: Run flake8
        run: |
          . venv/bin/activate
          flake8 supervisor tests

  lint-isort:
    name: Check isort
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.4
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run isort
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure

  lint-json:
    name: Check JSON
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.4
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register check-json problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-json.json"
      - name: Run check-json
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-json --all-files

  lint-pylint:
    name: Check pylint
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.4
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register pylint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/pylint.json"
      - name: Run pylint
        run: |
          . venv/bin/activate
          pylint supervisor tests

  lint-pyupgrade:
    name: Check pyupgrade
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.4
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run pyupgrade
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure

  pytest:
    runs-on: ubuntu-latest
    needs: prepare
    strategy:
      matrix:
        python-version: [3.8]
    name: Run tests Python ${{ matrix.python-version }}
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2.1.4
        id: python
        with:
          python-version: ${{ matrix.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Install additional system dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends libpulse0 libudev1
      - name: Register Python problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/python.json"
      - name: Install Pytest Annotation plugin
        run: |
          . venv/bin/activate
          # Ideally this should be part of our dependencies
          # However this plugin is fairly new and doesn't run correctly
          # on a non-GitHub environment.
          pip install pytest-github-actions-annotate-failures
      - name: Run pytest
        run: |
          . venv/bin/activate
          pytest \
            -qq \
            --timeout=10 \
            --durations=10 \
            --cov supervisor \
            -o console_output_style=count \
            tests
      - name: Upload coverage artifact
        uses: actions/upload-artifact@v2.2.0
        with:
          name: coverage-${{ matrix.python-version }}
          path: .coverage

  coverage:
    name: Process test coverage
    runs-on: ubuntu-latest
    needs: pytest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.4
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Download all coverage artifacts
        uses: actions/download-artifact@v2
      - name: Combine coverage results
        run: |
          . venv/bin/activate
          coverage combine coverage*/.coverage*
          coverage report
          coverage xml
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v1.0.14
```
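The venv cache key in this workflow combines the runner OS, the resolved Python version, and `hashFiles()` digests of both requirements files, so any change to a dependency pin invalidates the cached virtual environment, while the `restore-keys` entries allow progressively looser fallback matches. A rough local sketch of that key derivation; the `Linux` and `3.8.6` values are illustrative placeholders, and `hash_files` only approximates GitHub's `hashFiles()`:

```python
# Sketch: approximate the CI cache key derivation from ci.yaml.
# "Linux" and "3.8.6" stand in for runner.os and the resolved Python version.
import hashlib


def hash_files(*paths: str) -> str:
    """Roughly mimic hashFiles(): SHA-256 over per-file SHA-256 digests."""
    outer = hashlib.sha256()
    for path in paths:
        with open(path, "rb") as fh:
            outer.update(hashlib.sha256(fh.read()).digest())
    return outer.hexdigest()


key = (
    f"Linux-venv-3.8.6-"
    f"{hash_files('requirements.txt')}-"
    f"{hash_files('requirements_tests.txt')}"
)
print(key)  # changes whenever either requirements file changes
```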
.github/workflows/matchers/check-executables-have-shebangs.json (new file, 14 lines, vendored)

```json
{
  "problemMatcher": [
    {
      "owner": "check-executables-have-shebangs",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+)$",
          "file": 1,
          "message": 2
        }
      ]
    }
  ]
}
```
.github/workflows/matchers/check-json.json (new file, 16 lines, vendored)

```json
{
  "problemMatcher": [
    {
      "owner": "check-json",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
          "file": 1,
          "message": 2,
          "line": 3,
          "column": 4
        }
      ]
    }
  ]
}
```
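This matcher targets the `line N column M` form that Python's `json` module uses in its error messages, which is what the `check-json` hook prints. A quick sanity check of the pattern; the `bad.json` file name is a hypothetical example:

```python
# Verify the check-json problem-matcher regex against a real json error message.
import json
import re

PATTERN = re.compile(r"^(.+):\s(.+\sline\s(\d+)\scolumn\s(\d+).+)$")

try:
    json.loads("{,}")  # invalid on purpose
except json.JSONDecodeError as err:
    line = f"bad.json: {err}"  # "bad.json" is a hypothetical file name
    match = PATTERN.match(line)
    assert match is not None
    print(match.group(1), match.group(3), match.group(4))  # file, line, column
```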
.github/workflows/matchers/flake8.json (new file, 30 lines, vendored)

```json
{
  "problemMatcher": [
    {
      "owner": "flake8-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    },
    {
      "owner": "flake8-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    }
  ]
}
```
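The two matchers split flake8 output by code prefix: `E` codes surface as errors, while `C`/`D`/`F`/`N`/`W` codes surface as warnings (the pylint matcher further below uses the same idea with four-digit codes). A small sketch exercising both regexes; the file paths and messages are made-up examples:

```python
# Classify sample flake8 output lines the way the two problem matchers do.
import re

ERROR = re.compile(r"^(.*):(\d+):(\d+):\s(E\d{3}\s.*)$")
WARNING = re.compile(r"^(.*):(\d+):(\d+):\s([CDFNW]\d{3}\s.*)$")

samples = [
    "supervisor/core.py:10:1: E302 expected 2 blank lines, got 1",
    "tests/test_core.py:3:1: F401 'os' imported but unused",
]

for line in samples:
    if ERROR.match(line):
        print("error  :", line)
    elif WARNING.match(line):
        print("warning:", line)
```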
.github/workflows/matchers/hadolint.json (new file, 16 lines, vendored)

```json
{
  "problemMatcher": [
    {
      "owner": "hadolint",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
          "file": 1,
          "line": 2,
          "message": 3,
          "code": 4
        }
      ]
    }
  ]
}
```
.github/workflows/matchers/pylint.json (new file, 32 lines, vendored)

```json
{
  "problemMatcher": [
    {
      "owner": "pylint-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    },
    {
      "owner": "pylint-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    }
  ]
}
```
.github/workflows/matchers/python.json (new file, 18 lines, vendored)

```json
{
  "problemMatcher": [
    {
      "owner": "python",
      "pattern": [
        {
          "regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
          "file": 1,
          "line": 2
        },
        {
          "regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
          "message": 2
        }
      ]
    }
  ]
}
```
.github/workflows/release-drafter.yml (new file, 15 lines, vendored)

```yaml
name: Release Drafter

on:
  push:
    # branches to consider in the event; optional, defaults to all
    branches:
      - dev

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    steps:
      - uses: release-drafter/release-drafter@v5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```
.github/workflows/sentry.yaml (new file, 21 lines, vendored)

```yaml
name: Sentry Release

# yamllint disable-line rule:truthy
on:
  release:
    types: [published, prereleased]

jobs:
  createSentryRelease:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Sentry Release
        uses: getsentry/action-release@v1.1
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
          SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
        with:
          environment: production
```
.gitignore (5 lines changed, vendored)

```diff
@@ -95,3 +95,8 @@ ENV/
 .vscode/*
 !.vscode/cSpell.json
 !.vscode/tasks.json
+!.vscode/launch.json
+
+# mypy
+/.mypy_cache/*
+/.dmypy.json
```
.pre-commit-config.yaml (new file, 34 lines)

```yaml
repos:
  - repo: https://github.com/psf/black
    rev: 20.8b1
    hooks:
      - id: black
        args:
          - --safe
          - --quiet
          - --target-version
          - py38
        files: ^((supervisor|tests)/.+)?[^/]+\.py$
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.8.3
    hooks:
      - id: flake8
        additional_dependencies:
          - flake8-docstrings==1.5.0
          - pydocstyle==5.0.2
        files: ^(supervisor|script|tests)/.+\.py$
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.1.0
    hooks:
      - id: check-executables-have-shebangs
        stages: [manual]
      - id: check-json
  - repo: https://github.com/pre-commit/mirrors-isort
    rev: v4.3.21
    hooks:
      - id: isort
  - repo: https://github.com/asottile/pyupgrade
    rev: v2.6.2
    hooks:
      - id: pyupgrade
        args: [--py37-plus]
```
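The `files:` pattern on the black hook is worth a second look: the leading group is optional, so the regex accepts both top-level `*.py` files and anything under `supervisor/` or `tests/`. A quick check of which paths it selects; the paths are illustrative:

```python
# Which paths does the black hook's files pattern select?
import re

FILES = re.compile(r"^((supervisor|tests)/.+)?[^/]+\.py$")

for path in [
    "setup.py",            # top-level script: matches
    "supervisor/core.py",  # package module: matches
    "tests/test_core.py",  # test module: matches
    "script/build.py",     # not in the listed dirs: no match
]:
    print(f"{path}: {bool(FILES.match(path))}")
```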
.vscode/launch.json (new file, 18 lines, vendored)

```json
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Supervisor remote debug",
      "type": "python",
      "request": "attach",
      "port": 33333,
      "host": "172.30.32.2",
      "pathMappings": [
        {
          "localRoot": "${workspaceFolder}",
          "remoteRoot": "/usr/src/supervisor"
        }
      ]
    }
  ]
}
```
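This attach configuration expects a debug server already listening inside the Supervisor container at 172.30.32.2:33333. The changeset doesn't show the server side; a minimal sketch of what it would need, assuming the `debugpy` package (the exact debugger the Supervisor used may differ):

```python
# Sketch of the remote side for the attach config above (assumes debugpy).
import debugpy

debugpy.listen(("0.0.0.0", 33333))  # port must match launch.json
debugpy.wait_for_client()           # block until VS Code attaches
```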
.vscode/tasks.json (180 lines changed, vendored)

```diff
@@ -1,92 +1,90 @@
 {
-  "version": "2.0.0",
-  "tasks": [
-    {
-      "label": "Run Testenv",
-      "type": "shell",
-      "command": "./scripts/test_env.sh",
-      "group": {
-        "kind": "test",
-        "isDefault": true,
-      },
-      "presentation": {
-        "reveal": "always",
-        "panel": "new"
-      },
-      "problemMatcher": []
-    },
-    {
-      "label": "Run Testenv CLI",
-      "type": "shell",
-      "command": "docker run --rm -ti -v /etc/machine-id:/etc/machine-id --network=hassio --add-host hassio:172.30.32.2 homeassistant/amd64-hassio-cli:dev",
-      "group": {
-        "kind": "test",
-        "isDefault": true,
-      },
-      "presentation": {
-        "reveal": "always",
-        "panel": "new"
-      },
-      "problemMatcher": []
-    },
-    {
-      "label": "Update UI",
-      "type": "shell",
-      "command": "./scripts/update-frontend.sh",
-      "group": {
-        "kind": "build",
-        "isDefault": true
-      },
-      "presentation": {
-        "reveal": "always",
-        "panel": "new"
-      },
-      "problemMatcher": []
-    },
-    {
-      "label": "Pytest",
-      "type": "shell",
-      "command": "pytest --timeout=10 tests",
-      "group": {
-        "kind": "test",
-        "isDefault": true,
-      },
-      "presentation": {
-        "reveal": "always",
-        "panel": "new"
-      },
-      "problemMatcher": []
-    },
-    {
-      "label": "Flake8",
-      "type": "shell",
-      "command": "flake8 hassio tests",
-      "group": {
-        "kind": "test",
-        "isDefault": true,
-      },
-      "presentation": {
-        "reveal": "always",
-        "panel": "new"
-      },
-      "problemMatcher": []
-    },
-    {
-      "label": "Pylint",
-      "type": "shell",
-      "command": "pylint hassio",
-      "dependsOn": [
-        "Install all Requirements"
-      ],
-      "group": {
-        "kind": "test",
-        "isDefault": true,
-      },
-      "presentation": {
-        "reveal": "always",
-        "panel": "new"
-      },
-      "problemMatcher": []
-    }
-  ]
-}
+  "version": "2.0.0",
+  "tasks": [
+    {
+      "label": "Run Testenv",
+      "type": "shell",
+      "command": "./scripts/test_env.sh",
+      "group": {
+        "kind": "test",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
+    {
+      "label": "Run Testenv CLI",
+      "type": "shell",
+      "command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
+      "group": {
+        "kind": "test",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
+    {
+      "label": "Update UI",
+      "type": "shell",
+      "command": "./scripts/update-frontend.sh",
+      "group": {
+        "kind": "build",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
+    {
+      "label": "Pytest",
+      "type": "shell",
+      "command": "pytest --timeout=10 tests",
+      "group": {
+        "kind": "test",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
+    {
+      "label": "Flake8",
+      "type": "shell",
+      "command": "flake8 supervisor tests",
+      "group": {
+        "kind": "test",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    },
+    {
+      "label": "Pylint",
+      "type": "shell",
+      "command": "pylint supervisor",
+      "dependsOn": ["Install all Requirements"],
+      "group": {
+        "kind": "test",
+        "isDefault": true
+      },
+      "presentation": {
+        "reveal": "always",
+        "panel": "new"
+      },
+      "problemMatcher": []
+    }
+  ]
+}
```
API.md (deleted file, 800 lines; the removed content follows)

# Hass.io

## Hass.io RESTful API

Interface for Home Assistant to control things from the supervisor.

On error / Code 400:

```json
{
    "result": "error",
    "message": ""
}
```

On success / Code 200:

```json
{
    "result": "ok",
    "data": { }
}
```

To access the API you need to set the `X-HASSIO-KEY` header; the token is available to add-ons/Home Assistant through the `HASSIO_TOKEN` environment variable.
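A minimal sketch of an authenticated call from inside an add-on, using only the standard library. The `hassio` host name matches the `--add-host hassio:172.30.32.2` alias used in the old tasks.json above; adjust it for your environment:

```python
# Sketch: call the Supervisor API with the X-HASSIO-KEY header.
import json
import os
import urllib.request

req = urllib.request.Request(
    "http://hassio/supervisor/info",
    headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
)
with urllib.request.urlopen(req) as resp:
    info = json.load(resp)
print(info["data"]["version"])  # the payload is wrapped in "data" per the schema above
```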
### Hass.io

- GET `/supervisor/ping`

This API call doesn't need a token.

- GET `/supervisor/info`

The add-ons listed in `addons` are only the installed ones.

```json
{
    "version": "INSTALL_VERSION",
    "last_version": "LAST_VERSION",
    "arch": "armhf|aarch64|i386|amd64",
    "channel": "stable|beta|dev",
    "timezone": "TIMEZONE",
    "logging": "debug|info|warning|error|critical",
    "ip_address": "ip address",
    "wait_boot": "int",
    "debug": "bool",
    "debug_block": "bool",
    "addons": [
        {
            "name": "xy bla",
            "slug": "xy",
            "description": "description",
            "repository": "12345678|null",
            "version": "LAST_VERSION",
            "installed": "INSTALL_VERSION",
            "icon": "bool",
            "logo": "bool",
            "state": "started|stopped"
        }
    ],
    "addons_repositories": [
        "REPO_URL"
    ]
}
```

- POST `/supervisor/update`

Optional:

```json
{
    "version": "VERSION"
}
```

- POST `/supervisor/options`

```json
{
    "channel": "stable|beta|dev",
    "timezone": "TIMEZONE",
    "wait_boot": "int",
    "debug": "bool",
    "debug_block": "bool",
    "logging": "debug|info|warning|error|critical",
    "addons_repositories": [
        "REPO_URL"
    ]
}
```

- POST `/supervisor/reload`

Reload add-ons/version.

- GET `/supervisor/logs`

Output is the raw docker log.

- GET `/supervisor/stats`

```json
{
    "cpu_percent": 0.0,
    "memory_usage": 283123,
    "memory_limit": 329392,
    "memory_percent": 1.4,
    "network_tx": 0,
    "network_rx": 0,
    "blk_read": 0,
    "blk_write": 0
}
```

- GET `/supervisor/repair`

Repair overlayfs issues and restore lost images.

### Snapshot

- GET `/snapshots`

```json
{
    "snapshots": [
        {
            "slug": "SLUG",
            "date": "ISO",
            "name": "Custom name",
            "type": "full|partial",
            "protected": "bool"
        }
    ]
}
```

- POST `/snapshots/reload`

- POST `/snapshots/new/upload`

return:

```json
{
    "slug": ""
}
```

- POST `/snapshots/new/full`

```json
{
    "name": "Optional",
    "password": "Optional"
}
```

return:

```json
{
    "slug": ""
}
```

- POST `/snapshots/new/partial`

```json
{
    "name": "Optional",
    "addons": ["ADDON_SLUG"],
    "folders": ["FOLDER_NAME"],
    "password": "Optional"
}
```

return:

```json
{
    "slug": ""
}
```
- POST `/snapshots/reload`
|
||||
|
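For example, creating a partial snapshot and reading back the generated slug could look like the following sketch (same `requests`/`http://hassio` assumptions as the earlier example; name and folder values are illustrative):

```python
# Sketch: create a partial snapshot and capture its slug.
import os

import requests

headers = {"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]}

resp = requests.post(
    "http://hassio/snapshots/new/partial",
    headers=headers,
    json={"name": "Before upgrade", "folders": ["ssl"]},  # illustrative values
)
resp.raise_for_status()
slug = resp.json()["data"]["slug"]
print(f"Created snapshot {slug}")
```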
- GET `/snapshots/{slug}/info`

```json
{
  "slug": "SNAPSHOT ID",
  "type": "full|partial",
  "name": "custom snapshot name / description",
  "date": "ISO",
  "size": "SIZE_IN_MB",
  "protected": "bool",
  "homeassistant": "version",
  "addons": [
    {
      "slug": "ADDON_SLUG",
      "name": "NAME",
      "version": "INSTALLED_VERSION",
      "size": "SIZE_IN_MB"
    }
  ],
  "repositories": ["URL"],
  "folders": ["NAME"]
}
```

- POST `/snapshots/{slug}/remove`

- GET `/snapshots/{slug}/download`

- POST `/snapshots/{slug}/restore/full`

```json
{
  "password": "Optional"
}
```

- POST `/snapshots/{slug}/restore/partial`

```json
{
  "homeassistant": "bool",
  "addons": ["ADDON_SLUG"],
  "folders": ["FOLDER_NAME"],
  "password": "Optional"
}
```
### Host

- POST `/host/reload`

- POST `/host/shutdown`

- POST `/host/reboot`

- GET `/host/info`

```json
{
  "hostname": "hostname|null",
  "features": ["shutdown", "reboot", "hostname", "services", "hassos"],
  "operating_system": "HassOS XY|Ubuntu 16.4|null",
  "kernel": "4.15.7|null",
  "chassis": "specific|null",
  "deployment": "stable|beta|dev|null",
  "cpe": "xy|null"
}
```

- POST `/host/options`

```json
{
  "hostname": ""
}
```

#### Services

- GET `/host/services`

```json
{
  "services": [
    {
      "name": "xy.service",
      "description": "XY ...",
      "state": "active|"
    }
  ]
}
```

- POST `/host/service/{unit}/stop`

- POST `/host/service/{unit}/start`

- POST `/host/service/{unit}/reload`
### HassOS

- GET `/hassos/info`

```json
{
  "version": "2.3",
  "version_cli": "7",
  "version_latest": "2.4",
  "version_cli_latest": "8",
  "board": "ova|rpi"
}
```

- POST `/hassos/update`

```json
{
  "version": "optional"
}
```

- POST `/hassos/update/cli`

```json
{
  "version": "optional"
}
```

- POST `/hassos/config/sync`

Load host configs from a USB stick.
### Hardware

- GET `/hardware/info`

```json
{
  "serial": ["/dev/xy"],
  "input": ["Input device name"],
  "disk": ["/dev/sdax"],
  "gpio": ["gpiochip0", "gpiochip100"],
  "audio": {
    "CARD_ID": {
      "name": "xy",
      "type": "microphone",
      "devices": [
        {
          "chan_id": "channel ID",
          "chan_type": "type of device"
        }
      ]
    }
  }
}
```

- GET `/hardware/audio`

```json
{
  "audio": {
    "input": {
      "0,0": "Mic"
    },
    "output": {
      "1,0": "Jack",
      "1,1": "HDMI"
    }
  }
}
```
### Home Assistant

- GET `/homeassistant/info`

```json
{
  "version": "INSTALL_VERSION",
  "last_version": "LAST_VERSION",
  "arch": "arch",
  "machine": "Image machine type",
  "ip_address": "ip address",
  "image": "str",
  "custom": "bool -> if custom image",
  "boot": "bool",
  "port": 8123,
  "ssl": "bool",
  "watchdog": "bool",
  "wait_boot": 600
}
```

- POST `/homeassistant/update`

Optional:

```json
{
  "version": "VERSION"
}
```

- GET `/homeassistant/logs`

Output is the raw Docker log.

- POST `/homeassistant/restart`
- POST `/homeassistant/check`
- POST `/homeassistant/start`
- POST `/homeassistant/stop`
- POST `/homeassistant/rebuild`

- POST `/homeassistant/options`

```json
{
  "image": "Optional|null",
  "last_version": "Optional for custom image|null",
  "port": "port for accessing Home Assistant",
  "ssl": "bool",
  "password": "",
  "refresh_token": "",
  "watchdog": "bool",
  "wait_boot": 600
}
```

Setting `image` and `last_version` to `null` resets these options.

- POST/GET `/homeassistant/api`

Proxy to the real Home Assistant instance.

- GET `/homeassistant/websocket`

Proxy to the real Home Assistant websocket instance.

- GET `/homeassistant/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```
### RESTful API for add-ons

If an add-on wants to call itself, it can use `/addons/self/...`, as in the sketch below.
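A sketch of an add-on reading its own metadata through the `self` alias (same `requests`/`http://hassio` assumptions as the earlier examples):

```python
# Sketch: an add-on querying its own info via the `self` alias.
import os

import requests

resp = requests.get(
    "http://hassio/addons/self/info",
    headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
)
resp.raise_for_status()
info = resp.json()["data"]
print(info["name"], info["version"], info["state"])
```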
- GET `/addons`

Get all available add-ons.

```json
{
  "addons": [
    {
      "name": "xy bla",
      "slug": "xy",
      "description": "description",
      "repository": "core|local|REP_ID",
      "version": "LAST_VERSION",
      "installed": "none|INSTALL_VERSION",
      "detached": "bool",
      "available": "bool",
      "build": "bool",
      "url": "null|url",
      "icon": "bool",
      "logo": "bool"
    }
  ],
  "repositories": [
    {
      "slug": "12345678",
      "name": "Repository Name|unknown",
      "source": "URL_OF_REPOSITORY",
      "url": "WEBSITE|REPOSITORY",
      "maintainer": "BLA BLU <fla@dld.ch>|unknown"
    }
  ]
}
```
- POST `/addons/reload`
- GET `/addons/{addon}/info`

```json
{
  "name": "xy bla",
  "slug": "xdssd_xybla",
  "hostname": "xdssd-xybla",
  "dns": [],
  "description": "description",
  "long_description": "null|markdown",
  "auto_update": "bool",
  "url": "null|url of addon",
  "detached": "bool",
  "available": "bool",
  "arch": ["armhf", "aarch64", "i386", "amd64"],
  "machine": ["raspberrypi2", "tinker"],
  "homeassistant": "null|min Home Assistant version",
  "repository": "12345678|null",
  "version": "null|VERSION_INSTALLED",
  "last_version": "LAST_VERSION",
  "state": "none|started|stopped",
  "boot": "auto|manual",
  "build": "bool",
  "options": "{}",
  "network": "{}|null",
  "network_description": "{}|null",
  "host_network": "bool",
  "host_pid": "bool",
  "host_ipc": "bool",
  "host_dbus": "bool",
  "privileged": ["NET_ADMIN", "SYS_ADMIN"],
  "apparmor": "disable|default|profile",
  "devices": ["/dev/xy"],
  "udev": "bool",
  "auto_uart": "bool",
  "icon": "bool",
  "logo": "bool",
  "changelog": "bool",
  "hassio_api": "bool",
  "hassio_role": "default|homeassistant|manager|admin",
  "homeassistant_api": "bool",
  "auth_api": "bool",
  "full_access": "bool",
  "protected": "bool",
  "rating": "1-6",
  "stdin": "bool",
  "webui": "null|http(s)://[HOST]:port/xy/zx",
  "gpio": "bool",
  "kernel_modules": "bool",
  "devicetree": "bool",
  "docker_api": "bool",
  "audio": "bool",
  "audio_input": "null|0,0",
  "audio_output": "null|0,0",
  "services_role": "['service:access']",
  "discovery": "['service']",
  "ip_address": "ip address",
  "ingress": "bool",
  "ingress_entry": "null|/api/hassio_ingress/slug",
  "ingress_url": "null|/api/hassio_ingress/slug/entry.html",
  "ingress_port": "null|int",
  "ingress_panel": "null|bool"
}
```

- GET `/addons/{addon}/icon`

- GET `/addons/{addon}/logo`

- GET `/addons/{addon}/changelog`

- POST `/addons/{addon}/options`

```json
{
  "boot": "auto|manual",
  "auto_update": "bool",
  "network": {
    "CONTAINER": "port|[ip, port]"
  },
  "options": {},
  "audio_output": "null|0,0",
  "audio_input": "null|0,0",
  "ingress_panel": "bool"
}
```

To reset custom network, audio, or options, set them to `null`.
- POST `/addons/{addon}/security`

This endpoint cannot be called by the add-on itself.

```json
{
  "protected": "bool"
}
```

- POST `/addons/{addon}/start`

- POST `/addons/{addon}/stop`

- POST `/addons/{addon}/install`

- POST `/addons/{addon}/uninstall`

- POST `/addons/{addon}/update`

- GET `/addons/{addon}/logs`

Output is the raw Docker log.

- POST `/addons/{addon}/restart`

- POST `/addons/{addon}/rebuild`

Only supported for locally built add-ons.

- POST `/addons/{addon}/stdin`

Write data to the add-on's stdin.

- GET `/addons/{addon}/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```
### Ingress

- POST `/ingress/session`

Create a new session for access to the ingress service; see the sketch below.

```json
{
  "session": "token"
}
```
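A sketch of obtaining a session token (same assumptions as the earlier examples; the exact cookie name used when presenting the token to `/ingress/{token}` is not specified here):

```python
# Sketch: create an ingress session token.
import os

import requests

resp = requests.post(
    "http://hassio/ingress/session",
    headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
)
resp.raise_for_status()
session = resp.json()["data"]["session"]
# The token is then presented as a cookie when opening /ingress/{token}.
print(session)
```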
- GET `/ingress/panels`

Return a list of enabled panels.

```json
{
  "panels": {
    "addon_slug": {
      "enable": "boolean",
      "icon": "mdi:...",
      "title": "title",
      "admin": "boolean"
    }
  }
}
```

- VIEW `/ingress/{token}`

Ingress Web UI for this add-on. The add-on needs to support Home Assistant auth!
Requires an ingress session as a cookie.
### Discovery

- GET `/discovery`

```json
{
  "discovery": [
    {
      "addon": "slug",
      "service": "name",
      "uuid": "uuid",
      "config": {}
    }
  ]
}
```

- GET `/discovery/{UUID}`

```json
{
  "addon": "slug",
  "service": "name",
  "uuid": "uuid",
  "config": {}
}
```

- POST `/discovery`

```json
{
  "service": "name",
  "config": {}
}
```

return:

```json
{
  "uuid": "uuid"
}
```

- DEL `/discovery/{UUID}`
### Services

- GET `/services`

```json
{
  "services": [
    {
      "slug": "name",
      "available": "bool",
      "providers": "list"
    }
  ]
}
```

#### MQTT

- GET `/services/mqtt`

```json
{
  "addon": "name",
  "host": "xy",
  "port": "8883",
  "ssl": "bool",
  "username": "optional",
  "password": "optional",
  "protocol": "3.1.1"
}
```

- POST `/services/mqtt`

```json
{
  "host": "xy",
  "port": "8883",
  "ssl": "bool|optional",
  "username": "optional",
  "password": "optional",
  "protocol": "3.1.1"
}
```

- DEL `/services/mqtt`
### Misc

- GET `/info`

```json
{
  "supervisor": "version",
  "homeassistant": "version",
  "hassos": "null|version",
  "hostname": "name",
  "machine": "type",
  "arch": "arch",
  "supported_arch": ["arch1", "arch2"],
  "channel": "stable|beta|dev",
  "logging": "debug|info|warning|error|critical",
  "timezone": "Europe/Zurich"
}
```

### DNS

- GET `/dns/info`

```json
{
  "host": "ip-address",
  "version": "1",
  "latest_version": "2",
  "servers": ["dns://8.8.8.8"]
}
```

- POST `/dns/options`

```json
{
  "servers": ["dns://8.8.8.8"]
}
```
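Setting the upstream servers could look like this sketch (same `requests`/`http://hassio` assumptions as the earlier examples; the server value mirrors the payload above):

```python
# Sketch: point the Supervisor DNS at a custom upstream server.
import os

import requests

resp = requests.post(
    "http://hassio/dns/options",
    headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
    json={"servers": ["dns://8.8.8.8"]},
)
resp.raise_for_status()
```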
- POST `/dns/update`

```json
{
  "version": "VERSION"
}
```

- GET `/dns/logs`

- GET `/dns/stats`

```json
{
  "cpu_percent": 0.0,
  "memory_usage": 283123,
  "memory_limit": 329392,
  "memory_percent": 1.4,
  "network_tx": 0,
  "network_rx": 0,
  "blk_read": 0,
  "blk_write": 0
}
```
### Auth / SSO API

You can use the Home Assistant user system. The Supervisor handles this auth
system.

You can call POST `/auth`.

We support:

- JSON `{ "user|name": "...", "password": "..." }`
- application/x-www-form-urlencoded `user|name=...&password=...`
- BasicAuth
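A sketch of the JSON variant (same `requests`/`http://hassio` assumptions as the earlier examples; the credentials are placeholders):

```python
# Sketch: validate Home Assistant user credentials through the Supervisor.
import os

import requests

resp = requests.post(
    "http://hassio/auth",
    headers={"X-HASSIO-KEY": os.environ["HASSIO_TOKEN"]},
    json={"username": "jane", "password": "secret"},  # placeholder credentials
)
print("valid credentials" if resp.status_code == 200 else "rejected")
```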
Dockerfile
@@ -1,38 +1,40 @@
ARG BUILD_FROM
FROM $BUILD_FROM

ENV \
    S6_SERVICES_GRACETIME=10000 \
    SUPERVISOR_API=http://localhost

# Install base
RUN apk add --no-cache \
    openssl \
    libffi \
    musl \
    git \
    socat \
    glib \
    libstdc++ \
    eudev \
    eudev-libs
RUN \
    apk add --no-cache \
        eudev \
        eudev-libs \
        git \
        glib \
        libffi \
        libpulse \
        musl \
        openssl

ARG BUILD_ARCH
WORKDIR /usr/src

# Install requirements
COPY requirements.txt .
RUN export MAKEFLAGS="-j$(nproc)" \
RUN \
    export MAKEFLAGS="-j$(nproc)" \
    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
        "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
        -r ./requirements.txt \
    && rm -f requirements.txt

# Install HassIO
COPY . hassio
RUN pip3 install --no-cache-dir -e ./hassio \
    && python3 -m compileall ./hassio/hassio
# Install Home Assistant Supervisor
COPY . supervisor
RUN \
    pip3 install --no-cache-dir -e ./supervisor \
    && python3 -m compileall ./supervisor/supervisor


# Initialize udev daemon, handle CMD
COPY entry.sh /bin/
ENTRYPOINT ["/bin/entry.sh"]

WORKDIR /
CMD [ "python3", "-m", "hassio" ]
COPY rootfs /
LICENSE
@@ -178,7 +178,7 @@
APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
@@ -186,7 +186,7 @@
same "printed page" as the copyright notice for easier
identification within third-party archives.

Copyright 2017 Pascal Vizeli
Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
@@ -1,3 +1,3 @@
include LICENSE.md
graft hassio
graft supervisor
recursive-exclude * *.py[co]
README.md
@@ -1,30 +1,26 @@
[](https://dev.azure.com/home-assistant/Hass.io/_build/latest?definitionId=2&branchName=dev)

# Hass.io
# Home Assistant Supervisor

## First private cloud solution for home automation

Hass.io is a Docker-based system for managing your Home Assistant installation
and related applications. The system is controlled via Home Assistant which
communicates with the Supervisor. The Supervisor provides an API to manage the
installation. This includes changing network settings or installing
and updating software.

Home Assistant (former Hass.io) is a container-based system for managing your
Home Assistant Core installation and related applications. The system is
controlled via Home Assistant which communicates with the Supervisor. The
Supervisor provides an API to manage the installation. This includes changing
network settings or installing and updating software.

## Installation

Installation instructions can be found at <https://home-assistant.io/hassio>.
Installation instructions can be found at https://home-assistant.io/hassio.

## Development

The development of the supervisor is a bit tricky. Not difficult but tricky.
The development of the Supervisor is not difficult but tricky.

- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-builder
- Go into a HassOS device or VM and pull your supervisor.
- Set the developer modus with cli `hassio supervisor options --channel=dev`
- You can use the builder to create your Supervisor: https://github.com/home-assistant/hassio-builder
- Access a HassOS device or VM and pull your Supervisor.
- Set the developer modus with the CLI tool: `ha supervisor options --channel=dev`
- Tag it as `homeassistant/xy-hassio-supervisor:latest`
- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
- Restart the service with `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
- Test your changes

Small Bugfix or improvements, make a PR. Significant change makes first an RFC.
For small bugfixes or improvements, make a PR. For significant changes open a RFC first, please. Thanks.
azure-pipelines-release.yml (new file)
@@ -0,0 +1,53 @@
# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - dev
  tags:
    include:
      - "*"
pr: none
variables:
  - name: versionBuilder
    value: "7.0"
  - group: docker

jobs:
  - job: "VersionValidate"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - task: UsePythonVersion@0
        displayName: "Use Python 3.8"
        inputs:
          versionSpec: "3.8"
      - script: |
          setup_version="$(python setup.py -V)"
          branch_version="$(Build.SourceBranchName)"

          if [ "${branch_version}" == "dev" ]; then
            exit 0
          elif [ "${setup_version}" != "${branch_version}" ]; then
            echo "Version of tag ${branch_version} don't match with ${setup_version}!"
            exit 1
          fi
        displayName: "Check version of branch/tag"
  - job: "Release"
    dependsOn:
      - "VersionValidate"
    pool:
      vmImage: "ubuntu-latest"
    steps:
      - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
        displayName: "Docker hub login"
      - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
        displayName: "Install Builder"
      - script: |
          sudo docker run --rm --privileged \
            -v ~/.docker:/root/.docker \
            -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
            homeassistant/amd64-builder:$(versionBuilder) \
            --generic $(Build.SourceBranchName) --all -t /data
        displayName: "Build Release"
azure-pipelines-wheels.yml (new file)
@@ -0,0 +1,27 @@
# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - dev
pr: none
variables:
  - name: versionWheels
    value: '1.13.0-3.8-alpine3.12'
resources:
  repositories:
    - repository: azure
      type: github
      name: 'home-assistant/ci-azure'
      endpoint: 'home-assistant'


jobs:
  - template: templates/azp-job-wheels.yaml@azure
    parameters:
      builderVersion: '$(versionWheels)'
      builderApk: 'build-base;libffi-dev;openssl-dev'
      builderPip: 'Cython'
      skipBinary: 'aiohttp'
      wheelsRequirement: 'requirements.txt'
@@ -1,168 +0,0 @@
# https://dev.azure.com/home-assistant

trigger:
  batch: true
  branches:
    include:
      - master
      - dev
  tags:
    include:
      - '*'
    exclude:
      - untagged*
pr:
  - dev
variables:
  - name: basePythonTag
    value: '3.7-alpine3.10'
  - name: versionHadolint
    value: 'v1.16.3'
  - name: versionBuilder
    value: '4.4'
  - name: versionWheels
    value: '1.0-3.7-alpine3.10'
  - group: docker
  - group: wheels


stages:

  - stage: 'Test'
    jobs:
      - job: 'Tox'
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - task: UsePythonVersion@0
            displayName: 'Use Python 3.7'
            inputs:
              versionSpec: '3.7'
          - script: pip install tox
            displayName: 'Install Tox'
          - script: tox
            displayName: 'Run Tox'
      - job: 'Black'
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - task: UsePythonVersion@0
            displayName: 'Use Python $(python.version)'
            inputs:
              versionSpec: '3.7'
          - script: pip install black
            displayName: 'Install black'
          - script: black --target-version py37 --check hassio tests
            displayName: 'Run Black'
      - job: 'JQ'
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - script: sudo apt-get install -y jq
            displayName: 'Install JQ'
          - bash: |
              shopt -s globstar
              cat **/*.json | jq '.'
            displayName: 'Run JQ'
      - job: 'Hadolint'
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - script: sudo docker pull hadolint/hadolint:$(versionHadolint)
            displayName: 'Install Hadolint'
          - script: |
              sudo docker run --rm -i \
                -v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
                hadolint/hadolint:$(versionHadolint) < Dockerfile
            displayName: 'Run Hadolint'

  - stage: 'Wheels'
    jobs:
      - job: 'Wheels'
        condition: eq(variables['Build.SourceBranchName'], 'dev')
        timeoutInMinutes: 360
        pool:
          vmImage: 'ubuntu-latest'
        strategy:
          maxParallel: 5
          matrix:
            amd64:
              buildArch: 'amd64'
            i386:
              buildArch: 'i386'
            armhf:
              buildArch: 'armhf'
            armv7:
              buildArch: 'armv7'
            aarch64:
              buildArch: 'aarch64'
        steps:
          - script: |
              sudo apt-get update
              sudo apt-get install -y --no-install-recommends \
                qemu-user-static \
                binfmt-support \
                curl

              sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
              sudo update-binfmts --enable qemu-arm
              sudo update-binfmts --enable qemu-aarch64
            displayName: 'Initial cross build'
          - script: |
              mkdir -p .ssh
              echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
              ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
              chmod 600 .ssh/*
            displayName: 'Install ssh key'
          - script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
            displayName: 'Install wheels builder'
          - script: |
              sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
                homeassistant/$(buildArch)-wheels:$(versionWheels) \
                --apk "build-base;libffi-dev;openssl-dev" \
                --index $(wheelsIndex) \
                --requirement requirements.txt \
                --upload rsync \
                --remote wheels@$(wheelsHost):/opt/wheels
            displayName: 'Run wheels build'

  - stage: 'Deploy'
    jobs:
      - job: 'VersionValidate'
        condition: or(startsWith(variables['Build.SourceBranch'], 'refs/tags'), eq(variables['Build.SourceBranchName'], 'dev'))
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - task: UsePythonVersion@0
            displayName: 'Use Python 3.7'
            inputs:
              versionSpec: '3.7'
          - script: |
              setup_version="$(python setup.py -V)"
              branch_version="$(Build.SourceBranchName)"

              if [ "${branch_version}" == "dev" ]; then
                exit 0
              elif [ "${setup_version}" != "${branch_version}" ]; then
                echo "Version of tag ${branch_version} don't match with ${setup_version}!"
                exit 1
              fi
            displayName: 'Check version of branch/tag'
      - job: 'Release'
        dependsOn:
          - 'VersionValidate'
        pool:
          vmImage: 'ubuntu-latest'
        steps:
          - script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
            displayName: 'Docker hub login'
          - script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
            displayName: 'Install Builder'
          - script: |
              sudo docker run --rm --privileged \
                -v ~/.docker:/root/.docker \
                -v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
                homeassistant/amd64-builder:$(versionBuilder) \
                --supervisor $(basePythonTag) --version $(Build.SourceBranchName) \
                --all -t /data --docker-hub homeassistant
            displayName: 'Build Release'
build.json (new file)
@@ -0,0 +1,13 @@
{
  "image": "homeassistant/{arch}-hassio-supervisor",
  "build_from": {
    "aarch64": "homeassistant/aarch64-base-python:3.8-alpine3.12",
    "armhf": "homeassistant/armhf-base-python:3.8-alpine3.12",
    "armv7": "homeassistant/armv7-base-python:3.8-alpine3.12",
    "amd64": "homeassistant/amd64-base-python:3.8-alpine3.12",
    "i386": "homeassistant/i386-base-python:3.8-alpine3.12"
  },
  "labels": {
    "io.hass.type": "supervisor"
  }
}
codecov.yaml (new file)
@@ -0,0 +1,11 @@
codecov:
  branch: dev
coverage:
  status:
    project:
      default:
        target: 40
        threshold: 0.09
comment: false
github_checks:
  annotations: false
entry.sh
@@ -1,13 +0,0 @@
#!/bin/bash
set -e

udevd --daemon
udevadm trigger

if CMD="$(command -v "$1")"; then
  shift
  exec "$CMD" "$@"
else
  echo "Command not found: $1"
  exit 1
fi
@@ -1 +0,0 @@
"""Init file for Hass.io."""
@@ -1,42 +0,0 @@
"""Init file for Hass.io hardware RESTful API."""
import logging

from .utils import api_process
from ..const import (
    ATTR_SERIAL,
    ATTR_DISK,
    ATTR_GPIO,
    ATTR_AUDIO,
    ATTR_INPUT,
    ATTR_OUTPUT,
)
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


class APIHardware(CoreSysAttributes):
    """Handle RESTful API for hardware functions."""

    @api_process
    async def info(self, request):
        """Show hardware info."""
        return {
            ATTR_SERIAL: list(
                self.sys_hardware.serial_devices | self.sys_hardware.serial_by_id
            ),
            ATTR_INPUT: list(self.sys_hardware.input_devices),
            ATTR_DISK: list(self.sys_hardware.disk_devices),
            ATTR_GPIO: list(self.sys_hardware.gpio_devices),
            ATTR_AUDIO: self.sys_hardware.audio_devices,
        }

    @api_process
    async def audio(self, request):
        """Show ALSA audio devices."""
        return {
            ATTR_AUDIO: {
                ATTR_INPUT: self.sys_host.alsa.input_devices,
                ATTR_OUTPUT: self.sys_host.alsa.output_devices,
            }
        }
@@ -1,57 +0,0 @@
"""Init file for Hass.io HassOS RESTful API."""
import asyncio
import logging
from typing import Any, Awaitable, Dict

import voluptuous as vol
from aiohttp import web

from ..const import (
    ATTR_BOARD,
    ATTR_VERSION,
    ATTR_VERSION_CLI,
    ATTR_VERSION_CLI_LATEST,
    ATTR_VERSION_LATEST,
)
from ..coresys import CoreSysAttributes
from .utils import api_process, api_validate

_LOGGER = logging.getLogger(__name__)

SCHEMA_VERSION = vol.Schema({vol.Optional(ATTR_VERSION): vol.Coerce(str)})


class APIHassOS(CoreSysAttributes):
    """Handle RESTful API for HassOS functions."""

    @api_process
    async def info(self, request: web.Request) -> Dict[str, Any]:
        """Return HassOS information."""
        return {
            ATTR_VERSION: self.sys_hassos.version,
            ATTR_VERSION_CLI: self.sys_hassos.version_cli,
            ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
            ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
            ATTR_BOARD: self.sys_hassos.board,
        }

    @api_process
    async def update(self, request: web.Request) -> None:
        """Update HassOS."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)

        await asyncio.shield(self.sys_hassos.update(version))

    @api_process
    async def update_cli(self, request: web.Request) -> None:
        """Update HassOS CLI."""
        body = await api_validate(SCHEMA_VERSION, request)
        version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)

        await asyncio.shield(self.sys_hassos.update_cli(version))

    @api_process
    def config_sync(self, request: web.Request) -> Awaitable[None]:
        """Trigger config reload on HassOS."""
        return asyncio.shield(self.sys_hassos.config_sync())
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,20 +0,0 @@
/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"sources":["webpack:///./src/ingress-view/hassio-ingress-view.ts"],"names":["customElement","HassioIngressView","property","this","_addon","html","_templateObject2","name","ingress_url","_templateObject","changedProps","_get","_getPrototypeOf","prototype","call","has","addon","route","path","substr","oldRoute","get","oldAddon","undefined","_fetchData","_callee","addonSlug","_ref","_ref2","regeneratorRuntime","wrap","_context","prev","next","Promise","all","fetchHassioAddonInfo","hass","catch","Error","createHassioSession","sent","_slicedToArray","ingress","t0","console","error","alert","message","history","back","stop","css","_templateObject3","LitElement"],"mappings":"4gSAmBCA,YAAc,0CACTC,smBACHC,kEACAA,mEACAA,4EAED,WACE,OAAKC,KAAKC,OAMHC,YAAPC,IAC0BH,KAAKC,OAAOG,KACpBJ,KAAKC,OAAOI,aAPrBH,YAAPI,0CAYJ,SAAkBC,GAGhB,GAFAC,EAAAC,EApBEX,EAoBFY,WAAA,eAAAV,MAAAW,KAAAX,KAAmBO,GAEdA,EAAaK,IAAI,SAAtB,CAIA,IAAMC,EAAQb,KAAKc,MAAMC,KAAKC,OAAO,GAE/BC,EAAWV,EAAaW,IAAI,SAC5BC,EAAWF,EAAWA,EAASF,KAAKC,OAAO,QAAKI,EAElDP,GAASA,IAAUM,GACrBnB,KAAKqB,WAAWR,0FAIpB,SAAAS,EAAyBC,GAAzB,IAAAC,EAAAC,EAAAZ,EAAA,OAAAa,mBAAAC,KAAA,SAAAC,GAAA,cAAAA,EAAAC,KAAAD,EAAAE,MAAA,cAAAF,EAAAC,KAAA,EAAAD,EAAAE,KAAA,EAE0BC,QAAQC,IAAI,CAChCC,YAAqBjC,KAAKkC,KAAMX,GAAWY,MAAM,WAC/C,MAAM,IAAIC,MAAM,iCAElBC,YAAoBrC,KAAKkC,MAAMC,MAAM,WACnC,MAAM,IAAIC,MAAM,2CAPxB,UAAAZ,EAAAI,EAAAU,KAAAb,EAAAc,EAAAf,EAAA,IAEWX,EAFXY,EAAA,IAWee,QAXf,CAAAZ,EAAAE,KAAA,cAYY,IAAIM,MAAM,wCAZtB,OAeIpC,KAAKC,OAASY,EAflBe,EAAAE,KAAA,iBAAAF,EAAAC,KAAA,GAAAD,EAAAa,GAAAb,EAAA,SAkBIc,QAAQC,MAARf,EAAAa,IACAG,MAAMhB,EAAAa,GAAII,SAAW,mCACrBC,QAAQC,OApBZ,yBAAAnB,EAAAoB,SAAA1B,EAAAtB,KAAA,yRAwBA,WACE,OAAOiD,YAAPC,UA7D4BC","file":"chunk.5dd33a3a20657ed46a19.js","sourcesContent":["import {\n LitElement,\n customElement,\n property,\n TemplateResult,\n html,\n PropertyValues,\n CSSResult,\n css,\n} from \"lit-element\";\nimport { HomeAssistant, Route } from \"../../../src/types\";\nimport {\n createHassioSession,\n HassioAddonDetails,\n fetchHassioAddonInfo,\n} from \"../../../src/data/hassio\";\nimport \"../../../src/layouts/hass-loading-screen\";\nimport \"../../../src/layouts/hass-subpage\";\n\n@customElement(\"hassio-ingress-view\")\nclass HassioIngressView extends LitElement {\n @property() public hass!: HomeAssistant;\n @property() public route!: Route;\n @property() private _addon?: HassioAddonDetails;\n\n protected render(): TemplateResult | void {\n if (!this._addon) {\n return html`\n <hass-loading-screen></hass-loading-screen>\n `;\n }\n\n return html`\n <hass-subpage .header=${this._addon.name} hassio>\n <iframe src=${this._addon.ingress_url}></iframe>\n </hass-subpage>\n `;\n }\n\n protected updated(changedProps: PropertyValues) {\n super.firstUpdated(changedProps);\n\n if (!changedProps.has(\"route\")) {\n return;\n }\n\n const addon = this.route.path.substr(1);\n\n const oldRoute = changedProps.get(\"route\") as this[\"route\"] | undefined;\n const oldAddon = oldRoute ? 
oldRoute.path.substr(1) : undefined;\n\n if (addon && addon !== oldAddon) {\n this._fetchData(addon);\n }\n }\n\n private async _fetchData(addonSlug: string) {\n try {\n const [addon] = await Promise.all([\n fetchHassioAddonInfo(this.hass, addonSlug).catch(() => {\n throw new Error(\"Failed to fetch add-on info\");\n }),\n createHassioSession(this.hass).catch(() => {\n throw new Error(\"Failed to create an ingress session\");\n }),\n ]);\n\n if (!addon.ingress) {\n throw new Error(\"This add-on does not support ingress\");\n }\n\n this._addon = addon;\n } catch (err) {\n // tslint:disable-next-line\n console.error(err);\n alert(err.message || \"Unknown error starting ingress.\");\n history.back();\n }\n }\n\n static get styles(): CSSResult {\n return css`\n iframe {\n display: block;\n width: 100%;\n height: 100%;\n border: 0;\n }\n paper-icon-button {\n color: var(--text-primary-color);\n }\n `;\n }\n}\n\ndeclare global {\n interface HTMLElementTagNameMap {\n \"hassio-ingress-view\": HassioIngressView;\n }\n}\n"],"sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,21 +0,0 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
(window.webpackJsonp=window.webpackJsonp||[]).push([[9],{101:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(124),i=t.n(e),o=t(126),u=t.n(o),a=i.a,c=u.a}}]);
//# sourceMappingURL=chunk.7f8cce5798f837214ef8.js.map
Binary file not shown.
@@ -1 +0,0 @@
{"version":3,"sources":["webpack:///../src/resources/load_markdown.js"],"names":["__webpack_require__","r","__webpack_exports__","d","marked","filterXSS","marked__WEBPACK_IMPORTED_MODULE_0__","marked__WEBPACK_IMPORTED_MODULE_0___default","n","xss__WEBPACK_IMPORTED_MODULE_1__","xss__WEBPACK_IMPORTED_MODULE_1___default","marked_","filterXSS_"],"mappings":"0FAAAA,EAAAC,EAAAC,GAAAF,EAAAG,EAAAD,EAAA,2BAAAE,IAAAJ,EAAAG,EAAAD,EAAA,8BAAAG,IAAA,IAAAC,EAAAN,EAAA,KAAAO,EAAAP,EAAAQ,EAAAF,GAAAG,EAAAT,EAAA,KAAAU,EAAAV,EAAAQ,EAAAC,GAGaL,EAASO,IACTN,EAAYO","file":"chunk.7f8cce5798f837214ef8.js","sourcesContent":["import marked_ from \"marked\";\nimport filterXSS_ from \"xss\";\n\nexport const marked = marked_;\nexport const filterXSS = filterXSS_;\n"],"sourceRoot":""}
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,31 +0,0 @@
/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
Code distributed by Google as part of the polymer project is also
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
*/

/**
@license
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
This code may only be used under the BSD style license found at
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
part of the polymer project is also subject to an additional IP rights grant
found at http://polymer.github.io/PATENTS.txt
*/
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
!function(e){function n(n){for(var t,o,a=n[0],i=n[1],f=0,c=[];f<a.length;f++)o=a[f],r[o]&&c.push(r[o][0]),r[o]=0;for(t in i)Object.prototype.hasOwnProperty.call(i,t)&&(e[t]=i[t]);for(u&&u(n);c.length;)c.shift()()}var t={},r={4:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var a=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=a);var i,f=document.createElement("script");f.charset="utf-8",f.timeout=120,o.nc&&f.setAttribute("nonce",o.nc),f.src=function(e){return o.p+"chunk."+{0:"7f411ffa9df152cb8f05",1:"598ae99dfd641ab3a30c",2:"af7784dbf07df8e24819",3:"b15efbd4fb2c8cac0ad4",5:"87d3a6d0178fb26762cf",6:"6f4702eafe52425373ed",7:"5dd33a3a20657ed46a19",8:"7c785f796f428abae18d",9:"7f8cce5798f837214ef8",10:"04bcaa18b59728e10be9",11:"9d7374dae6137783dda4",12:"6685a7f98b13655ab808",13:"f1156b978f6f3143a651"}[e]+".js"}(e),i=function(n){f.onerror=f.onload=null,clearTimeout(u);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),a=n&&n.target&&n.target.src,i=new Error("Loading chunk "+e+" failed.\n("+o+": "+a+")");i.type=o,i.request=a,t[1](i)}r[e]=void 0}};var u=setTimeout(function(){i({type:"timeout",target:f})},12e4);f.onerror=f.onload=i,document.head.appendChild(f)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var a=window.webpackJsonp=window.webpackJsonp||[],i=a.push.bind(a);a.push=n,a=a.slice();for(var f=0;f<a.length;f++)n(a[f]);var u=i;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(1),t.e(6)]).then(t.bind(null,2)),Promise.all([t.e(1),t.e(12),t.e(8)]).then(t.bind(null,1))});var r=document.createElement("style");r.innerHTML="\nbody {\n font-family: Roboto, sans-serif;\n -moz-osx-font-smoothing: grayscale;\n -webkit-font-smoothing: antialiased;\n font-weight: 400;\n margin: 0;\n padding: 0;\n height: 100vh;\n}\n",document.head.appendChild(r)}]);
//# sourceMappingURL=entrypoint.js.map
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,38 +0,0 @@
<!doctype html>
<html>
<head>
  <meta charset="utf-8">
  <title>Hass.io</title>
  <meta name='viewport' content='width=device-width, user-scalable=no'>
  <style>
    body {
      height: 100vh;
      margin: 0;
      padding: 0;
    }
  </style>
  <script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
</head>
<body>
  <hassio-app></hassio-app>
  <script>
    function addScript(src) {
      var e = document.createElement('script');
      e.src = src;
      document.write(e.outerHTML);
    }
    var webComponentsSupported = (
      'customElements' in window &&
      'import' in document.createElement('link') &&
      'content' in document.createElement('template'));
    if (!webComponentsSupported) {
      addScript('/static/webcomponents-lite.js');
    }
  </script>
  <!--
    Disabled while we make Home Assistant able to serve the right files.
    <script src="./app.js"></script>
  -->
  <link rel='import' href='./hassio-app.html'>
</body>
</html>
Binary file not shown.
@@ -1,100 +0,0 @@
"""Init file for Hass.io util for RESTful API."""
import json
import logging

from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..const import (
    JSON_RESULT,
    JSON_DATA,
    JSON_MESSAGE,
    RESULT_OK,
    RESULT_ERROR,
    CONTENT_TYPE_BINARY,
)
from ..exceptions import HassioError, APIError, APIForbidden

_LOGGER = logging.getLogger(__name__)


def json_loads(data):
    """Extract json from string with support for '' and None."""
    if not data:
        return {}
    try:
        return json.loads(data)
    except json.JSONDecodeError:
        raise APIError("Invalid json")


def api_process(method):
    """Wrap function with true/false calls to rest api."""

    async def wrap_api(api, *args, **kwargs):
        """Return API information."""
        try:
            answer = await method(api, *args, **kwargs)
        except (APIError, APIForbidden) as err:
            return api_return_error(message=str(err))
        except HassioError:
            return api_return_error(message="Unknown Error, see logs")

        if isinstance(answer, dict):
            return api_return_ok(data=answer)
        if isinstance(answer, web.Response):
            return answer
        elif isinstance(answer, bool) and not answer:
            return api_return_error()
        return api_return_ok()

    return wrap_api


def api_process_raw(content):
    """Wrap content_type into function."""

    def wrap_method(method):
        """Wrap function with raw output to rest api."""

        async def wrap_api(api, *args, **kwargs):
            """Return api information."""
            try:
                msg_data = await method(api, *args, **kwargs)
                msg_type = content
            except (APIError, APIForbidden) as err:
                msg_data = str(err).encode()
                msg_type = CONTENT_TYPE_BINARY
            except HassioError:
                msg_data = b""
                msg_type = CONTENT_TYPE_BINARY

            return web.Response(body=msg_data, content_type=msg_type)

        return wrap_api

    return wrap_method


def api_return_error(message=None):
    """Return an API error message."""
    return web.json_response(
        {JSON_RESULT: RESULT_ERROR, JSON_MESSAGE: message}, status=400
    )


def api_return_ok(data=None):
    """Return an API ok answer."""
    return web.json_response({JSON_RESULT: RESULT_OK, JSON_DATA: data or {}})


async def api_validate(schema, request):
    """Validate request data with schema."""
    data = await request.json(loads=json_loads)
    try:
        data = schema(data)
    except vol.Invalid as ex:
        raise APIError(humanize_error(data, ex)) from None

    return data
@@ -1,97 +0,0 @@
"""Manage SSO for Add-ons with Home Assistant user."""
import logging
import hashlib

from .const import FILE_HASSIO_AUTH, ATTR_PASSWORD, ATTR_USERNAME, ATTR_ADDON
from .coresys import CoreSysAttributes
from .utils.json import JsonConfig
from .validate import SCHEMA_AUTH_CONFIG
from .exceptions import AuthError, HomeAssistantAPIError

_LOGGER = logging.getLogger(__name__)


class Auth(JsonConfig, CoreSysAttributes):
    """Manage SSO for Add-ons with Home Assistant user."""

    def __init__(self, coresys):
        """Initialize updater."""
        super().__init__(FILE_HASSIO_AUTH, SCHEMA_AUTH_CONFIG)
        self.coresys = coresys

    def _check_cache(self, username, password):
        """Check password in cache."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) == password_h:
            _LOGGER.info("Cache hit for %s", username)
            return True

        _LOGGER.warning("No cache hit for %s", username)
        return False

    def _update_cache(self, username, password):
        """Cache a username, password."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) == password_h:
            return

        self._data[username_h] = password_h
        self.save_data()

    def _dismatch_cache(self, username, password):
        """Remove user from cache."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) != password_h:
            return

        self._data.pop(username_h, None)
        self.save_data()

    async def check_login(self, addon, username, password):
        """Check username login."""
        if password is None:
            _LOGGER.error("None as password is not supported!")
            raise AuthError()
        _LOGGER.info("Auth request from %s for %s", addon.slug, username)

        # Check API state
        if not await self.sys_homeassistant.check_api_state():
            _LOGGER.info("Home Assistant not running, check cache")
            return self._check_cache(username, password)

        try:
            async with self.sys_homeassistant.make_request(
                "post",
                "api/hassio_auth",
                json={
                    ATTR_USERNAME: username,
                    ATTR_PASSWORD: password,
                    ATTR_ADDON: addon.slug,
                },
            ) as req:

                if req.status == 200:
                    _LOGGER.info("Success login from %s", username)
                    self._update_cache(username, password)
                    return True

                _LOGGER.warning("Wrong login from %s", username)
                self._dismatch_cache(username, password)
                return False
        except HomeAssistantAPIError:
            _LOGGER.error("Can't request auth on Home Assistant!")

        raise AuthError()


def _rehash(value, salt2=""):
    """Rehash a value."""
    for idx in range(1, 20):
        value = hashlib.sha256(f"{value}{idx}{salt2}".encode()).hexdigest()
    return value
@@ -1,244 +0,0 @@
"""Bootstrap Hass.io."""
import logging
import os
from pathlib import Path
import shutil
import signal

from colorlog import ColoredFormatter

from .addons import AddonManager
from .api import RestAPI
from .arch import CpuArch
from .auth import Auth
from .const import CHANNEL_DEV, SOCKET_DOCKER
from .core import HassIO
from .coresys import CoreSys
from .dbus import DBusManager
from .discovery import Discovery
from .dns import CoreDNS
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .ingress import Ingress
from .services import ServiceManager
from .snapshots import SnapshotManager
from .store import StoreManager
from .supervisor import Supervisor
from .tasks import Tasks
from .updater import Updater
from .utils.dt import fetch_timezone

_LOGGER = logging.getLogger(__name__)

ENV_SHARE = "SUPERVISOR_SHARE"
ENV_NAME = "SUPERVISOR_NAME"
ENV_REPO = "HOMEASSISTANT_REPOSITORY"

MACHINE_ID = Path("/etc/machine-id")


async def initialize_coresys():
    """Initialize HassIO coresys/objects."""
    coresys = CoreSys()

    # Initialize core objects
    coresys.core = HassIO(coresys)
    coresys.dns = CoreDNS(coresys)
    coresys.arch = CpuArch(coresys)
    coresys.auth = Auth(coresys)
    coresys.updater = Updater(coresys)
    coresys.api = RestAPI(coresys)
    coresys.supervisor = Supervisor(coresys)
    coresys.homeassistant = HomeAssistant(coresys)
    coresys.addons = AddonManager(coresys)
    coresys.snapshots = SnapshotManager(coresys)
    coresys.host = HostManager(coresys)
    coresys.ingress = Ingress(coresys)
    coresys.tasks = Tasks(coresys)
    coresys.services = ServiceManager(coresys)
    coresys.store = StoreManager(coresys)
    coresys.discovery = Discovery(coresys)
    coresys.dbus = DBusManager(coresys)
    coresys.hassos = HassOS(coresys)

    # bootstrap config
    initialize_system_data(coresys)

    # Set Machine/Host ID
    if MACHINE_ID.exists():
        coresys.machine_id = MACHINE_ID.read_text().strip()

    # Init TimeZone
    if coresys.config.timezone == "UTC":
        coresys.config.timezone = await fetch_timezone(coresys.websession)

    return coresys


def initialize_system_data(coresys: CoreSys):
    """Set up the default configuration and create folders."""
    config = coresys.config

    # Home Assistant configuration folder
    if not config.path_homeassistant.is_dir():
        _LOGGER.info(
            "Create Home Assistant configuration folder %s", config.path_homeassistant
        )
        config.path_homeassistant.mkdir()

    # hassio ssl folder
    if not config.path_ssl.is_dir():
        _LOGGER.info("Create Hass.io SSL/TLS folder %s", config.path_ssl)
        config.path_ssl.mkdir()

    # hassio addon data folder
    if not config.path_addons_data.is_dir():
        _LOGGER.info("Create Hass.io Add-on data folder %s", config.path_addons_data)
        config.path_addons_data.mkdir(parents=True)

    if not config.path_addons_local.is_dir():
        _LOGGER.info(
            "Create Hass.io Add-on local repository folder %s", config.path_addons_local
        )
        config.path_addons_local.mkdir(parents=True)

    if not config.path_addons_git.is_dir():
        _LOGGER.info(
            "Create Hass.io Add-on git repositories folder %s", config.path_addons_git
        )
        config.path_addons_git.mkdir(parents=True)

    # hassio tmp folder
    if not config.path_tmp.is_dir():
        _LOGGER.info("Create Hass.io temp folder %s", config.path_tmp)
        config.path_tmp.mkdir(parents=True)

    # hassio backup folder
    if not config.path_backup.is_dir():
        _LOGGER.info("Create Hass.io backup folder %s", config.path_backup)
        config.path_backup.mkdir()

    # share folder
    if not config.path_share.is_dir():
        _LOGGER.info("Create Hass.io share folder %s", config.path_share)
        config.path_share.mkdir()

    # apparmor folder
    if not config.path_apparmor.is_dir():
        _LOGGER.info("Create Hass.io Apparmor folder %s", config.path_apparmor)
        config.path_apparmor.mkdir()

    # dns folder
    if not config.path_dns.is_dir():
        _LOGGER.info("Create Hass.io DNS folder %s", config.path_dns)
        config.path_dns.mkdir()

    # Update log level
    coresys.config.modify_log_level()

    # Check if ENV is in development mode
    if bool(os.environ.get("SUPERVISOR_DEV", 0)):
        _LOGGER.warning("SUPERVISOR_DEV is set")
        coresys.updater.channel = CHANNEL_DEV
        coresys.config.logging = "debug"
        coresys.config.debug = True


def migrate_system_env(coresys: CoreSys):
    """Cleanup some stuff after update."""
    config = coresys.config

    # hass.io 0.37 -> 0.38
    old_build = Path(config.path_hassio, "addons/build")
    if old_build.is_dir():
        try:
            old_build.rmdir()
        except OSError:
            _LOGGER.warning("Can't cleanup old Add-on build directory")


def initialize_logging():
    """Setup the logging."""
    logging.basicConfig(level=logging.INFO)
    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    colorfmt = f"%(log_color)s{fmt}%(reset)s"
    datefmt = "%y-%m-%d %H:%M:%S"

    # suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    logging.getLogger().handlers[0].setFormatter(
        ColoredFormatter(
            colorfmt,
            datefmt=datefmt,
            reset=True,
            log_colors={
                "DEBUG": "cyan",
                "INFO": "green",
                "WARNING": "yellow",
                "ERROR": "red",
                "CRITICAL": "red",
            },
        )
    )


def check_environment():
    """Check if all environment variables exist."""
    # check environment variables
    for key in (ENV_SHARE, ENV_NAME, ENV_REPO):
        try:
            os.environ[key]
        except KeyError:
            _LOGGER.fatal("Can't find %s in env!", key)
            return False

    # check docker socket
    if not SOCKET_DOCKER.is_socket():
        _LOGGER.fatal("Can't find Docker socket!")
        return False

    # check socat exec
    if not shutil.which("socat"):
        _LOGGER.fatal("Can't find socat!")
        return False

    # check gdbus exec
    if not shutil.which("gdbus"):
        _LOGGER.fatal("Can't find gdbus!")
        return False

    return True


def reg_signal(loop):
    """Register SIGTERM and SIGKILL to stop system."""
    try:
        loop.add_signal_handler(signal.SIGTERM, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGTERM")

    try:
        loop.add_signal_handler(signal.SIGHUP, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGHUP")

    try:
        loop.add_signal_handler(signal.SIGINT, lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGINT")


def supervisor_debugger(coresys: CoreSys) -> None:
    """Setup debugger if needed."""
    if not coresys.config.debug:
        return
    import ptvsd

    _LOGGER.info("Initialize Hass.io debugger")
|
||||
|
||||
ptvsd.enable_attach(address=("0.0.0.0", 33333), redirect_output=True)
|
||||
if coresys.config.debug_block:
|
||||
_LOGGER.info("Wait until debugger is attached")
|
||||
ptvsd.wait_for_attach()
|
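
# --- Illustrative sketch, not part of the original file. It shows how the
# bootstrap helpers above typically fit together at startup; the event-loop
# wiring here is an assumption for the example, not the real entrypoint:
#
#     import asyncio
#     import sys
#
#     initialize_logging()
#     if not check_environment():
#         sys.exit(1)
#
#     loop = asyncio.get_event_loop()
#     coresys = loop.run_until_complete(initialize_coresys())
#     migrate_system_env(coresys)
#     supervisor_debugger(coresys)
#     reg_signal(loop)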
hassio/config.py
@@ -1,242 +0,0 @@
"""Bootstrap Hass.io."""
from datetime import datetime
import logging
import os
from pathlib import Path, PurePath

from .const import (
    ATTR_ADDONS_CUSTOM_LIST,
    ATTR_DEBUG,
    ATTR_DEBUG_BLOCK,
    ATTR_LAST_BOOT,
    ATTR_LOGGING,
    ATTR_TIMEZONE,
    ATTR_WAIT_BOOT,
    FILE_HASSIO_CONFIG,
    HASSIO_DATA,
)
from .utils.dt import parse_datetime
from .utils.json import JsonConfig
from .validate import SCHEMA_HASSIO_CONFIG

_LOGGER = logging.getLogger(__name__)

HOMEASSISTANT_CONFIG = PurePath("homeassistant")

HASSIO_SSL = PurePath("ssl")

ADDONS_CORE = PurePath("addons/core")
ADDONS_LOCAL = PurePath("addons/local")
ADDONS_GIT = PurePath("addons/git")
ADDONS_DATA = PurePath("addons/data")

BACKUP_DATA = PurePath("backup")
SHARE_DATA = PurePath("share")
TMP_DATA = PurePath("tmp")
APPARMOR_DATA = PurePath("apparmor")
DNS_DATA = PurePath("dns")

DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()


class CoreConfig(JsonConfig):
    """Hold all core config data."""

    def __init__(self):
        """Initialize config object."""
        super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)

    @property
    def timezone(self):
        """Return system timezone."""
        return self._data[ATTR_TIMEZONE]

    @timezone.setter
    def timezone(self, value):
        """Set system timezone."""
        self._data[ATTR_TIMEZONE] = value

    @property
    def wait_boot(self) -> int:
        """Return wait time for auto boot stages."""
        return self._data[ATTR_WAIT_BOOT]

    @wait_boot.setter
    def wait_boot(self, value: int):
        """Set wait boot time."""
        self._data[ATTR_WAIT_BOOT] = value

    @property
    def debug(self) -> bool:
        """Return True if ptvsd is enabled."""
        return self._data[ATTR_DEBUG]

    @debug.setter
    def debug(self, value: bool):
        """Set debug mode."""
        self._data[ATTR_DEBUG] = value

    @property
    def debug_block(self) -> bool:
        """Return True if ptvsd should wait."""
        return self._data[ATTR_DEBUG_BLOCK]

    @debug_block.setter
    def debug_block(self, value: bool):
        """Set debug wait mode."""
        self._data[ATTR_DEBUG_BLOCK] = value

    @property
    def logging(self) -> str:
        """Return log level of system."""
        return self._data[ATTR_LOGGING]

    @logging.setter
    def logging(self, value: str):
        """Set system log level."""
        self._data[ATTR_LOGGING] = value
        self.modify_log_level()

    def modify_log_level(self) -> None:
        """Change log level."""
        lvl = getattr(logging, self.logging.upper())
        logging.getLogger("hassio").setLevel(lvl)

    @property
    def last_boot(self):
        """Return last boot datetime."""
        boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)

        boot_time = parse_datetime(boot_str)
        if not boot_time:
            return datetime.utcfromtimestamp(1)
        return boot_time

    @last_boot.setter
    def last_boot(self, value):
        """Set last boot datetime."""
        self._data[ATTR_LAST_BOOT] = value.isoformat()

    @property
    def path_hassio(self):
        """Return Hass.io data path."""
        return HASSIO_DATA

    @property
    def path_extern_hassio(self):
        """Return Hass.io data path external for Docker."""
        return PurePath(os.environ["SUPERVISOR_SHARE"])

    @property
    def path_extern_homeassistant(self):
        """Return config path external for Docker."""
        return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))

    @property
    def path_homeassistant(self):
        """Return config path inside supervisor."""
        return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)

    @property
    def path_extern_ssl(self):
        """Return SSL path external for Docker."""
        return str(PurePath(self.path_extern_hassio, HASSIO_SSL))

    @property
    def path_ssl(self):
        """Return SSL path inside supervisor."""
        return Path(HASSIO_DATA, HASSIO_SSL)

    @property
    def path_addons_core(self):
        """Return git path for core Add-ons."""
        return Path(HASSIO_DATA, ADDONS_CORE)

    @property
    def path_addons_git(self):
        """Return path for Git Add-on repositories."""
        return Path(HASSIO_DATA, ADDONS_GIT)

    @property
    def path_addons_local(self):
        """Return path for custom Add-ons."""
        return Path(HASSIO_DATA, ADDONS_LOCAL)

    @property
    def path_extern_addons_local(self):
        """Return path for custom Add-ons external for Docker."""
        return PurePath(self.path_extern_hassio, ADDONS_LOCAL)

    @property
    def path_addons_data(self):
        """Return root Add-on data folder."""
        return Path(HASSIO_DATA, ADDONS_DATA)

    @property
    def path_extern_addons_data(self):
        """Return root add-on data folder external for Docker."""
        return PurePath(self.path_extern_hassio, ADDONS_DATA)

    @property
    def path_tmp(self):
        """Return Hass.io temp folder."""
        return Path(HASSIO_DATA, TMP_DATA)

    @property
    def path_extern_tmp(self):
        """Return Hass.io temp folder for Docker."""
        return PurePath(self.path_extern_hassio, TMP_DATA)

    @property
    def path_backup(self):
        """Return root backup data folder."""
        return Path(HASSIO_DATA, BACKUP_DATA)

    @property
    def path_extern_backup(self):
        """Return root backup data folder external for Docker."""
        return PurePath(self.path_extern_hassio, BACKUP_DATA)

    @property
    def path_share(self):
        """Return root share data folder."""
        return Path(HASSIO_DATA, SHARE_DATA)

    @property
    def path_apparmor(self):
        """Return root Apparmor profile folder."""
        return Path(HASSIO_DATA, APPARMOR_DATA)

    @property
    def path_extern_share(self):
        """Return root share data folder external for Docker."""
        return PurePath(self.path_extern_hassio, SHARE_DATA)

    @property
    def path_extern_dns(self):
        """Return dns path external for Docker."""
        return str(PurePath(self.path_extern_hassio, DNS_DATA))

    @property
    def path_dns(self):
        """Return dns path inside supervisor."""
        return Path(HASSIO_DATA, DNS_DATA)

    @property
    def addons_repositories(self):
        """Return list of custom Add-on repositories."""
        return self._data[ATTR_ADDONS_CUSTOM_LIST]

    def add_addon_repository(self, repo):
        """Add a custom repository to list."""
        if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)

    def drop_addon_repository(self, repo):
        """Remove a custom repository from list."""
        if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
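
# --- Illustrative sketch, not part of the original file. Every internal
# `path_*` property pairs with a `path_extern_*` twin because the Supervisor
# itself runs inside a container: internal paths are used for file I/O, the
# extern paths for Docker bind mounts on the host. A hypothetical volume map:
#
#     config = CoreConfig()
#     volumes = {
#         str(config.path_extern_ssl): {"bind": "/ssl", "mode": "ro"},
#         str(config.path_extern_share): {"bind": "/share", "mode": "rw"},
#     }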
hassio/core.py
@@ -1,189 +0,0 @@
"""Main file for Hass.io."""
from contextlib import suppress
import asyncio
import logging

import async_timeout

from .coresys import CoreSysAttributes
from .const import (
    STARTUP_SYSTEM,
    STARTUP_SERVICES,
    STARTUP_APPLICATION,
    STARTUP_INITIALIZE,
)
from .exceptions import HassioError, HomeAssistantError, SupervisorUpdateError

_LOGGER = logging.getLogger(__name__)


class HassIO(CoreSysAttributes):
    """Main object of Hass.io."""

    def __init__(self, coresys):
        """Initialize Hass.io object."""
        self.coresys = coresys

    async def connect(self):
        """Connect Supervisor container."""
        await self.sys_supervisor.load()

    async def setup(self):
        """Set up HassIO orchestration."""
        # Load CoreDNS
        await self.sys_dns.load()

        # Load DBus
        await self.sys_dbus.load()

        # Load Host
        await self.sys_host.load()

        # Load Home Assistant
        await self.sys_homeassistant.load()

        # Load CPU/Arch
        await self.sys_arch.load()

        # Load HassOS
        await self.sys_hassos.load()

        # Load Stores
        await self.sys_store.load()

        # Load Add-ons
        await self.sys_addons.load()

        # rest api views
        await self.sys_api.load()

        # load last available update data
        await self.sys_updater.load()

        # load last available snapshot data
        await self.sys_snapshots.load()

        # load services
        await self.sys_services.load()

        # Load discovery
        await self.sys_discovery.load()

        # Load ingress
        await self.sys_ingress.load()

    async def start(self):
        """Start Hass.io orchestration."""
        await self.sys_api.start()

        # on release channel, try update itself
        if self.sys_supervisor.need_update:
            try:
                if self.sys_dev:
                    _LOGGER.warning("Ignore Hass.io updates on dev!")
                else:
                    await self.sys_supervisor.update()
            except SupervisorUpdateError:
                _LOGGER.fatal(
                    "Can't update supervisor! This will break some Add-ons or affect "
                    "future versions of Home Assistant!"
                )

        # start add-ons marked as initialize
        await self.sys_addons.boot(STARTUP_INITIALIZE)

        try:
            # Home Assistant is already running / the Supervisor was only restarted
            if self.sys_hardware.last_boot == self.sys_config.last_boot:
                _LOGGER.info("Hass.io reboot detected")
                return

            # reset registered services / discovery
            self.sys_services.reset()

            # start add-ons marked as system
            await self.sys_addons.boot(STARTUP_SYSTEM)

            # start add-ons marked as services
            await self.sys_addons.boot(STARTUP_SERVICES)

            # run Home Assistant
            if self.sys_homeassistant.boot:
                with suppress(HomeAssistantError):
                    await self.sys_homeassistant.start()

            # start add-ons marked as application
            await self.sys_addons.boot(STARTUP_APPLICATION)

            # store new last boot
            self._update_last_boot()

        finally:
            # Add core tasks into scheduler
            await self.sys_tasks.load()

            # If landingpage / run upgrade in background
            if self.sys_homeassistant.version == "landingpage":
                self.sys_create_task(self.sys_homeassistant.install())

            _LOGGER.info("Hass.io is up and running")

    async def stop(self):
        """Stop a running orchestration."""
        # don't process scheduler anymore
        self.sys_scheduler.suspend = True

        # store new last boot / prevent time adjustments
        self._update_last_boot()

        # process async stop tasks
        try:
            with async_timeout.timeout(10):
                await asyncio.wait(
                    [
                        self.sys_api.stop(),
                        self.sys_websession.close(),
                        self.sys_websession_ssl.close(),
                        self.sys_ingress.unload(),
                        self.sys_dns.unload(),
                    ]
                )
        except asyncio.TimeoutError:
            _LOGGER.warning("Force Shutdown!")

        _LOGGER.info("Hass.io is down")

    async def shutdown(self):
        """Shutdown all running containers in correct order."""
        await self.sys_addons.shutdown(STARTUP_APPLICATION)

        # Close Home Assistant
        with suppress(HassioError):
            await self.sys_homeassistant.stop()

        await self.sys_addons.shutdown(STARTUP_SERVICES)
        await self.sys_addons.shutdown(STARTUP_SYSTEM)
        await self.sys_addons.shutdown(STARTUP_INITIALIZE)

    def _update_last_boot(self):
        """Update last boot time."""
        self.sys_config.last_boot = self.sys_hardware.last_boot
        self.sys_config.save_data()

    async def repair(self):
        """Repair system integrity."""
        _LOGGER.info("Start repairing of Hass.io Environment")
        await self.sys_run_in_executor(self.sys_docker.repair)

        # Restore core functionality
        await self.sys_dns.repair()
        await self.sys_addons.repair()
        await self.sys_homeassistant.repair()

        # Fix HassOS specific
        if self.sys_hassos.available:
            await self.sys_hassos.repair_cli()

        # Tag version for latest
        await self.sys_supervisor.repair()
        _LOGGER.info("Finished repairing of Hass.io Environment")
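
# --- Illustrative note, not part of the original file. HassIO.shutdown()
# above tears the stack down in reverse boot order (application add-ons,
# then Home Assistant, then services/system/initialize add-ons), so
# consumers always stop before the add-ons they depend on.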
@@ -1,58 +0,0 @@
{
    "raspberrypi": [
        "armhf"
    ],
    "raspberrypi2": [
        "armv7",
        "armhf"
    ],
    "raspberrypi3": [
        "armv7",
        "armhf"
    ],
    "raspberrypi3-64": [
        "aarch64",
        "armv7",
        "armhf"
    ],
    "raspberrypi4": [
        "armv7",
        "armhf"
    ],
    "raspberrypi4-64": [
        "aarch64",
        "armv7",
        "armhf"
    ],
    "tinker": [
        "armv7",
        "armhf"
    ],
    "odroid-c2": [
        "aarch64"
    ],
    "odroid-xu": [
        "armv7",
        "armhf"
    ],
    "orangepi-prime": [
        "aarch64"
    ],
    "qemux86": [
        "i386"
    ],
    "qemux86-64": [
        "amd64",
        "i386"
    ],
    "qemuarm": [
        "armhf"
    ],
    "qemuarm-64": [
        "aarch64"
    ],
    "intel-nuc": [
        "amd64",
        "i386"
    ]
}
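
# --- Illustrative sketch, not part of the original file. The JSON above maps
# a machine type to its supported CPU architectures, best match first. A
# hypothetical lookup (the real selection logic lives in the arch handler):
#
#     import json
#     from pathlib import Path
#
#     data = json.loads(Path("machine.json").read_text())  # filename assumed
#     supported = data["raspberrypi3-64"]   # -> ["aarch64", "armv7", "armhf"]
#     best_arch = supported[0]              # -> "aarch64"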
@@ -1,17 +0,0 @@
pcm.!default {
    type asym
    capture.pcm "mic"
    playback.pcm "speaker"
}
pcm.mic {
    type plug
    slave {
        pcm "hw:$input"
    }
}
pcm.speaker {
    type plug
    slave {
        pcm "hw:$output"
    }
}
@@ -1,18 +0,0 @@
{
    "raspberrypi3": {
        "bcm2835 - bcm2835 ALSA": {
            "0,0": "Raspberry Jack",
            "0,1": "Raspberry HDMI"
        },
        "output": "0,0",
        "input": null
    },
    "raspberrypi2": {
        "output": "0,0",
        "input": null
    },
    "raspberrypi": {
        "output": "0,0",
        "input": null
    }
}
@@ -1,9 +0,0 @@
.:53 {
    log
    hosts /config/hosts {
        fallthrough
    }
    forward . $servers {
        health_check 10s
    }
}
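
# --- Illustrative sketch, not part of the original file. The Corefile above
# is a string.Template: CoreDNS._write_corefile() (see hassio/dns.py below)
# substitutes $servers with the configured forwarders, e.g.:
#
#     from string import Template
#
#     tmpl = Template("forward . $servers {\n    health_check 10s\n}")
#     print(tmpl.safe_substitute(servers="dns://8.8.8.8 dns://1.1.1.1"))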
@@ -1,39 +0,0 @@
"""D-Bus interface objects."""

from .systemd import Systemd
from .hostname import Hostname
from .rauc import Rauc
from ..coresys import CoreSysAttributes


class DBusManager(CoreSysAttributes):
    """A DBus Interface handler."""

    def __init__(self, coresys):
        """Initialize D-Bus interface."""
        self.coresys = coresys

        self._systemd = Systemd()
        self._hostname = Hostname()
        self._rauc = Rauc()

    @property
    def systemd(self):
        """Return the systemd interface."""
        return self._systemd

    @property
    def hostname(self):
        """Return the hostname interface."""
        return self._hostname

    @property
    def rauc(self):
        """Return the rauc interface."""
        return self._rauc

    async def load(self):
        """Connect interfaces to D-Bus."""
        await self.systemd.connect()
        await self.hostname.connect()
        await self.rauc.connect()
@@ -1,39 +0,0 @@
"""D-Bus interface for hostname."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = "org.freedesktop.hostname1"
DBUS_OBJECT = "/org/freedesktop/hostname1"


class Hostname(DBusInterface):
    """Handle D-Bus interface for hostname/system."""

    async def connect(self):
        """Connect to system's D-Bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to hostname")

    @dbus_connected
    def set_static_hostname(self, hostname):
        """Change local hostname.

        Return a coroutine.
        """
        return self.dbus.SetStaticHostname(hostname, False)

    @dbus_connected
    def get_properties(self):
        """Return local host information.

        Return a coroutine.
        """
        return self.dbus.get_properties(DBUS_NAME)
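
# --- Illustrative sketch, not part of the original file. The @dbus_connected
# methods return plain coroutines, so callers await them directly:
#
#     hostname = Hostname()
#     await hostname.connect()
#     await hostname.set_static_hostname("homeassistant")
#     props = await hostname.get_properties()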
@@ -1,55 +0,0 @@
"""D-Bus interface for rauc."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = "de.pengutronix.rauc"
DBUS_OBJECT = "/"


class Rauc(DBusInterface):
    """Handle D-Bus interface for rauc."""

    async def connect(self):
        """Connect to D-Bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to rauc")

    @dbus_connected
    def install(self, raucb_file):
        """Install rauc bundle file.

        Return a coroutine.
        """
        return self.dbus.Installer.Install(raucb_file)

    @dbus_connected
    def get_slot_status(self):
        """Get slot status.

        Return a coroutine.
        """
        return self.dbus.Installer.GetSlotStatus()

    @dbus_connected
    def get_properties(self):
        """Return rauc information.

        Return a coroutine.
        """
        return self.dbus.get_properties(f"{DBUS_NAME}.Installer")

    @dbus_connected
    def signal_completed(self):
        """Return a signal wrapper for completed signal.

        Return a coroutine.
        """
        return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
hassio/dns.py
@@ -1,342 +0,0 @@
"""CoreDNS control object."""
import asyncio
import logging
from contextlib import suppress
from ipaddress import IPv4Address, AddressValueError
from pathlib import Path
from string import Template
from typing import Awaitable, Dict, List, Optional

from .const import ATTR_SERVERS, ATTR_VERSION, DNS_SERVERS, FILE_HASSIO_DNS, DNS_SUFFIX
from .coresys import CoreSys, CoreSysAttributes
from .docker.dns import DockerDNS
from .docker.stats import DockerStats
from .exceptions import CoreDNSError, CoreDNSUpdateError, DockerAPIError
from .misc.forwarder import DNSForward
from .utils.json import JsonConfig
from .validate import SCHEMA_DNS_CONFIG

_LOGGER = logging.getLogger(__name__)

COREDNS_TMPL: Path = Path(__file__).parents[0].joinpath("data/coredns.tmpl")
RESOLV_CONF: Path = Path("/etc/resolv.conf")


class CoreDNS(JsonConfig, CoreSysAttributes):
    """Hass.io CoreDNS plugin object."""

    def __init__(self, coresys: CoreSys):
        """Initialize CoreDNS object."""
        super().__init__(FILE_HASSIO_DNS, SCHEMA_DNS_CONFIG)
        self.coresys: CoreSys = coresys
        self.instance: DockerDNS = DockerDNS(coresys)
        self.forwarder: DNSForward = DNSForward()

        self._hosts: Dict[IPv4Address, List[str]] = {}

    @property
    def corefile(self) -> Path:
        """Return Path to corefile."""
        return Path(self.sys_config.path_dns, "corefile")

    @property
    def hosts(self) -> Path:
        """Return Path to hosts file."""
        return Path(self.sys_config.path_dns, "hosts")

    @property
    def servers(self) -> List[str]:
        """Return list of DNS servers."""
        return self._data[ATTR_SERVERS]

    @servers.setter
    def servers(self, value: List[str]) -> None:
        """Set list of DNS servers."""
        self._data[ATTR_SERVERS] = value

    @property
    def version(self) -> Optional[str]:
        """Return current version of DNS."""
        return self._data.get(ATTR_VERSION)

    @version.setter
    def version(self, value: str) -> None:
        """Set current version of DNS."""
        self._data[ATTR_VERSION] = value

    @property
    def latest_version(self) -> Optional[str]:
        """Return latest version of CoreDNS."""
        return self.sys_updater.version_dns

    @property
    def in_progress(self) -> bool:
        """Return True if a task is in progress."""
        return self.instance.in_progress

    @property
    def need_update(self) -> bool:
        """Return True if an update is available."""
        return self.version != self.latest_version

    async def load(self) -> None:
        """Load DNS setup."""
        with suppress(CoreDNSError):
            self._import_hosts()

        # Check CoreDNS state
        try:
            # Evaluate Version if we lost this information
            if not self.version:
                self.version = await self.instance.get_latest_version(key=int)

            await self.instance.attach(tag=self.version)
        except DockerAPIError:
            _LOGGER.info(
                "No CoreDNS plugin Docker image %s found.", self.instance.image
            )

            # Install CoreDNS
            with suppress(CoreDNSError):
                await self.install()
        else:
            self.version = self.instance.version
            self.save_data()

        # Start DNS forwarder
        self.sys_create_task(self.forwarder.start(self.sys_docker.network.dns))

        with suppress(CoreDNSError):
            self._update_local_resolv()

        # Start if not running
        if await self.instance.is_running():
            return
        await self.start()

    async def unload(self) -> None:
        """Unload DNS forwarder."""
        await self.forwarder.stop()

    async def install(self) -> None:
        """Install CoreDNS."""
        _LOGGER.info("Setup CoreDNS plugin")
        while True:
            # read CoreDNS tag and install it
            if not self.latest_version:
                await self.sys_updater.reload()

            if self.latest_version:
                with suppress(DockerAPIError):
                    await self.instance.install(self.latest_version)
                    break
            _LOGGER.warning("Error on install CoreDNS plugin. Retry in 30sec")
            await asyncio.sleep(30)

        _LOGGER.info("CoreDNS plugin now installed")
        self.version = self.instance.version
        self.save_data()

        await self.start()

    async def update(self, version: Optional[str] = None) -> None:
        """Update CoreDNS plugin."""
        version = version or self.latest_version

        if version == self.version:
            _LOGGER.warning("Version %s is already installed for CoreDNS", version)
            return

        try:
            await self.instance.update(version)
        except DockerAPIError:
            _LOGGER.error("CoreDNS update failed")
            raise CoreDNSUpdateError() from None
        else:
            # Cleanup
            with suppress(DockerAPIError):
                await self.instance.cleanup()

        self.version = version
        self.save_data()

        # Start CoreDNS
        await self.start()

    async def restart(self) -> None:
        """Restart CoreDNS plugin."""
        with suppress(DockerAPIError):
            await self.instance.stop()
        await self.start()

    async def start(self) -> None:
        """Run CoreDNS."""
        self._write_corefile()

        # Start Instance
        _LOGGER.info("Start CoreDNS plugin")
        try:
            await self.instance.run()
        except DockerAPIError:
            _LOGGER.error("Can't start CoreDNS plugin")
            raise CoreDNSError() from None

    def reset(self) -> None:
        """Reset Config / Hosts."""
        self.servers = DNS_SERVERS

        with suppress(OSError):
            self.hosts.unlink()
        self._import_hosts()

    def _write_corefile(self) -> None:
        """Write CoreDNS config."""
        try:
            corefile_template: Template = Template(COREDNS_TMPL.read_text())
        except OSError as err:
            _LOGGER.error("Can't read coredns template file: %s", err)
            raise CoreDNSError() from None

        # Generate config file
        dns_servers = self.servers + list(set(DNS_SERVERS) - set(self.servers))
        data = corefile_template.safe_substitute(servers=" ".join(dns_servers))

        try:
            self.corefile.write_text(data)
        except OSError as err:
            _LOGGER.error("Can't update corefile: %s", err)
            raise CoreDNSError() from None

    def _import_hosts(self) -> None:
        """Import hosts entry."""
        # Generate Default
        if not self.hosts.exists():
            self.add_host(self.sys_docker.network.supervisor, ["hassio", "supervisor"])
            self.add_host(
                self.sys_docker.network.gateway, ["homeassistant", "home-assistant"]
            )
            return

        # Import existing host table
        try:
            with self.hosts.open("r") as hosts:
                for line in hosts.readlines():
                    try:
                        data = line.split(" ")
                        self._hosts[IPv4Address(data[0])] = data[1:]
                    except AddressValueError:
                        _LOGGER.warning("Can't read %s", line)

        except OSError as err:
            _LOGGER.error("Can't read hosts file: %s", err)
            raise CoreDNSError() from None

    def _write_hosts(self) -> None:
        """Write hosts from memory to file."""
        try:
            with self.hosts.open("w") as hosts:
                for address, hostnames in self._hosts.items():
                    host = " ".join(hostnames)
                    # one entry per line
                    hosts.write(f"{address!s} {host}\n")
        except OSError as err:
            _LOGGER.error("Can't write hosts file: %s", err)
            raise CoreDNSError() from None

    def add_host(self, ipv4: IPv4Address, names: List[str]) -> None:
        """Add a new host entry."""
        hostnames: List[str] = []
        for name in names:
            hostnames.append(name)
            hostnames.append(f"{name}.{DNS_SUFFIX}")

        self._hosts[ipv4] = hostnames
        _LOGGER.debug("Add Host entry %s -> %s", ipv4, hostnames)

        self._write_hosts()

    def delete_host(
        self, ipv4: Optional[IPv4Address] = None, host: Optional[str] = None
    ) -> None:
        """Remove an entry from hosts."""
        if host:
            for address, hostnames in self._hosts.items():
                if host not in hostnames:
                    continue
                ipv4 = address
                break

        # Remove entry
        if ipv4:
            _LOGGER.debug("Remove Host entry %s", ipv4)
            self._hosts.pop(ipv4, None)

            self._write_hosts()
        else:
            _LOGGER.warning("Can't remove Host entry: %s/%s", ipv4, host)

    def logs(self) -> Awaitable[bytes]:
        """Get CoreDNS docker logs.

        Return Coroutine.
        """
        return self.instance.logs()

    async def stats(self) -> DockerStats:
        """Return stats of CoreDNS."""
        try:
            return await self.instance.stats()
        except DockerAPIError:
            raise CoreDNSError() from None

    def is_running(self) -> Awaitable[bool]:
        """Return True if Docker container is running.

        Return a coroutine.
        """
        return self.instance.is_running()

    def is_fails(self) -> Awaitable[bool]:
        """Return True if the Docker container is in a failed state.

        Return a coroutine.
        """
        return self.instance.is_fails()

    async def repair(self) -> None:
        """Repair CoreDNS plugin."""
        if await self.instance.exists():
            return

        _LOGGER.info("Repair CoreDNS %s", self.version)
        try:
            await self.instance.install(self.version)
        except DockerAPIError:
            _LOGGER.error("Repairing of CoreDNS failed")

    def _update_local_resolv(self) -> None:
        """Update local resolv file."""
        resolv_lines: List[str] = []
        nameserver = f"nameserver {self.sys_docker.network.dns!s}"

        # Read resolv config
        try:
            with RESOLV_CONF.open("r") as resolv:
                for line in resolv.readlines():
                    resolv_lines.append(line)
        except OSError as err:
            _LOGGER.error("Can't read local resolv: %s", err)
            raise CoreDNSError() from None

        if nameserver in resolv_lines:
            return
        _LOGGER.info("Update resolv from Supervisor")

        # Write config back to resolv
        resolv_lines.append(nameserver)
        try:
            with RESOLV_CONF.open("w") as resolv:
                for line in resolv_lines:
                    resolv.write(line)
        except OSError as err:
            _LOGGER.error("Can't write local resolv: %s", err)
            raise CoreDNSError() from None
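
# --- Illustrative note, not part of the original file. add_host() writes
# classic hosts-file lines, one per IP, with an extra DNS_SUFFIX alias per
# name; with typical hassio network defaults the generated file looks like:
#
#     172.30.32.2 hassio hassio.local.hass.io supervisor supervisor.local.hass.io
#     172.30.32.1 homeassistant homeassistant.local.hass.io home-assistant home-assistant.local.hass.io
#
# (the addresses and the local.hass.io suffix are typical values, not guaranteed)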
@@ -1,178 +0,0 @@
"""Init file for Hass.io Docker object."""
from contextlib import suppress
from ipaddress import IPv4Address
import logging
from typing import Any, Dict, Optional

import attr
import docker

from ..const import SOCKET_DOCKER, DNS_SUFFIX
from ..exceptions import DockerAPIError
from .network import DockerNetwork

_LOGGER = logging.getLogger(__name__)


@attr.s(frozen=True)
class CommandReturn:
    """Return object from command run."""

    exit_code: int = attr.ib()
    output: bytes = attr.ib()


class DockerAPI:
    """Docker Hass.io wrapper.

    This class is not AsyncIO safe!
    """

    def __init__(self):
        """Initialize Docker base wrapper."""
        self.docker: docker.DockerClient = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)), version="auto", timeout=900
        )
        self.network: DockerNetwork = DockerNetwork(self.docker)

    @property
    def images(self) -> docker.models.images.ImageCollection:
        """Return API images."""
        return self.docker.images

    @property
    def containers(self) -> docker.models.containers.ContainerCollection:
        """Return API containers."""
        return self.docker.containers

    @property
    def api(self) -> docker.APIClient:
        """Return low-level API client."""
        return self.docker.api

    def run(
        self,
        image: str,
        version: str = "latest",
        ipv4: Optional[IPv4Address] = None,
        **kwargs: Dict[str, Any],
    ) -> docker.models.containers.Container:
        """Create a Docker container and run it.

        Need run inside executor.
        """
        name: str = kwargs.get("name", image)
        network_mode: str = kwargs.get("network_mode")
        hostname: str = kwargs.get("hostname")

        # Setup DNS
        kwargs["dns"] = [str(self.network.dns)]
        kwargs["dns_search"] = [DNS_SUFFIX]
        kwargs["domainname"] = DNS_SUFFIX

        # Setup network
        if not network_mode:
            kwargs["network"] = None

        # Create container
        try:
            container = self.docker.containers.create(
                f"{image}:{version}", use_config_proxy=False, **kwargs
            )
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't create container from %s: %s", name, err)
            raise DockerAPIError() from None

        # Attach network
        if not network_mode:
            alias = [hostname] if hostname else None
            try:
                self.network.attach_container(container, alias=alias, ipv4=ipv4)
            except DockerAPIError:
                _LOGGER.warning("Can't attach %s to hassio-net!", name)
            else:
                with suppress(DockerAPIError):
                    self.network.detach_default_bridge(container)

        # Run container
        try:
            container.start()
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't start %s: %s", name, err)
            raise DockerAPIError() from None

        # Update metadata
        with suppress(docker.errors.DockerException):
            container.reload()

        return container

    def run_command(
        self,
        image: str,
        version: str = "latest",
        command: Optional[str] = None,
        **kwargs: Dict[str, Any],
    ) -> CommandReturn:
        """Create a temporary container and run command.

        Need run inside executor.
        """
        stdout = kwargs.get("stdout", True)
        stderr = kwargs.get("stderr", True)

        _LOGGER.info("Run command '%s' on %s", command, image)
        try:
            container = self.docker.containers.run(
                f"{image}:{version}",
                command=command,
                network=self.network.name,
                use_config_proxy=False,
                **kwargs,
            )

            # wait until command is done
            result = container.wait()
            output = container.logs(stdout=stdout, stderr=stderr)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't execute command: %s", err)
            raise DockerAPIError() from None

        finally:
            # cleanup container
            with suppress(docker.errors.DockerException):
                container.remove(force=True)

        return CommandReturn(result.get("StatusCode"), output)

    def repair(self) -> None:
        """Repair local docker overlayfs2 issues."""
        _LOGGER.info("Prune stale containers")
        try:
            output = self.docker.api.prune_containers()
            _LOGGER.debug("Containers prune: %s", output)
        except docker.errors.APIError as err:
            _LOGGER.warning("Error for containers prune: %s", err)

        _LOGGER.info("Prune stale images")
        try:
            output = self.docker.api.prune_images(filters={"dangling": False})
            _LOGGER.debug("Images prune: %s", output)
        except docker.errors.APIError as err:
            _LOGGER.warning("Error for images prune: %s", err)

        _LOGGER.info("Prune stale builds")
        try:
            output = self.docker.api.prune_builds()
            _LOGGER.debug("Builds prune: %s", output)
        except docker.errors.APIError as err:
            _LOGGER.warning("Error for builds prune: %s", err)

        _LOGGER.info("Prune stale volumes")
        try:
            output = self.docker.api.prune_volumes()
            _LOGGER.debug("Volumes prune: %s", output)
        except docker.errors.APIError as err:
            _LOGGER.warning("Error for volumes prune: %s", err)
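
# --- Illustrative sketch, not part of the original file. DockerAPI is not
# asyncio-safe, so blocking calls are pushed into an executor; the wiring
# below is an assumption for the example:
#
#     import asyncio
#
#     async def demo(docker_api: DockerAPI) -> None:
#         loop = asyncio.get_event_loop()
#         result = await loop.run_in_executor(
#             None, lambda: docker_api.run_command("alpine", command="uname -a")
#         )
#         print(result.exit_code, result.output.decode())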
@@ -1,38 +0,0 @@
"""HassOS Cli docker object."""
import logging

import docker

from ..coresys import CoreSysAttributes
from .interface import DockerInterface

_LOGGER = logging.getLogger(__name__)


class DockerHassOSCli(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for HassOS Cli."""

    @property
    def image(self):
        """Return name of HassOS CLI image."""
        return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"

    def _stop(self, remove_container=True):
        """Don't need stop."""
        return True

    def _attach(self, tag: str):
        """Attach to running Docker container.

        Need run inside executor.
        """
        try:
            image = self.sys_docker.images.get(f"{self.image}:{tag}")

        except docker.errors.DockerException:
            _LOGGER.warning("Can't find a HassOS CLI %s", self.image)

        else:
            self._meta = image.attrs
            _LOGGER.info(
                "Found HassOS CLI %s with version %s", self.image, self.version
            )
@@ -1,72 +0,0 @@
"""Hass.io Docker object for the Supervisor."""
from ipaddress import IPv4Address
import logging
import os
from typing import Awaitable

import docker

from ..coresys import CoreSysAttributes
from ..exceptions import DockerAPIError
from .interface import DockerInterface

_LOGGER = logging.getLogger(__name__)


class DockerSupervisor(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for Supervisor."""

    @property
    def name(self) -> str:
        """Return name of Docker container."""
        return os.environ["SUPERVISOR_NAME"]

    @property
    def ip_address(self) -> IPv4Address:
        """Return IP address of this container."""
        return self.sys_docker.network.supervisor

    def _attach(self, tag: str) -> None:
        """Attach to running docker container.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            raise DockerAPIError() from None

        self._meta = docker_container.attrs
        _LOGGER.info(
            "Attach to Supervisor %s with version %s",
            self.image,
            self.sys_supervisor.version,
        )

        # If already attached
        if docker_container in self.sys_docker.network.containers:
            return

        # Attach to network
        _LOGGER.info("Connect Supervisor to Hass.io Network")
        self.sys_docker.network.attach_container(
            docker_container, alias=["hassio"], ipv4=self.sys_docker.network.supervisor
        )

    def retag(self) -> Awaitable[None]:
        """Retag latest image to version."""
        return self.sys_run_in_executor(self._retag)

    def _retag(self) -> None:
        """Retag latest image to version.

        Need run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)

            docker_container.image.tag(self.image, tag=self.version)
            docker_container.image.tag(self.image, tag="latest")
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't retag supervisor version: %s", err)
            raise DockerAPIError() from None
@@ -1,609 +0,0 @@
|
||||
"""Home Assistant control object."""
|
||||
import asyncio
|
||||
from contextlib import asynccontextmanager, suppress
|
||||
from datetime import datetime, timedelta
|
||||
from distutils.version import StrictVersion
|
||||
from ipaddress import IPv4Address
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
import re
|
||||
import secrets
|
||||
import time
|
||||
from typing import Any, AsyncContextManager, Awaitable, Dict, Optional
|
||||
from uuid import UUID
|
||||
|
||||
import aiohttp
|
||||
from aiohttp import hdrs
|
||||
import attr
|
||||
|
||||
from .const import (
|
||||
ATTR_ACCESS_TOKEN,
|
||||
ATTR_BOOT,
|
||||
ATTR_IMAGE,
|
||||
ATTR_LAST_VERSION,
|
||||
ATTR_PASSWORD,
|
||||
ATTR_PORT,
|
||||
ATTR_REFRESH_TOKEN,
|
||||
ATTR_SSL,
|
||||
ATTR_UUID,
|
||||
ATTR_VERSION,
|
||||
ATTR_WAIT_BOOT,
|
||||
ATTR_WATCHDOG,
|
||||
FILE_HASSIO_HOMEASSISTANT,
|
||||
HEADER_HA_ACCESS,
|
||||
)
|
||||
from .coresys import CoreSys, CoreSysAttributes
|
||||
from .docker.homeassistant import DockerHomeAssistant
|
||||
from .docker.stats import DockerStats
|
||||
from .exceptions import (
|
||||
DockerAPIError,
|
||||
HomeAssistantAPIError,
|
||||
HomeAssistantAuthError,
|
||||
HomeAssistantError,
|
||||
HomeAssistantUpdateError,
|
||||
)
|
||||
from .utils import check_port, convert_to_ascii, process_lock
|
||||
from .utils.json import JsonConfig
|
||||
from .validate import SCHEMA_HASS_CONFIG
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")
|
||||
|
||||
|
||||
@attr.s(frozen=True)
|
||||
class ConfigResult:
|
||||
"""Return object from config check."""
|
||||
|
||||
valid = attr.ib()
|
||||
log = attr.ib()
|
||||
|
||||
|
||||
class HomeAssistant(JsonConfig, CoreSysAttributes):
|
||||
"""Home Assistant core object for handle it."""
|
||||
|
||||
def __init__(self, coresys: CoreSys):
|
||||
"""Initialize Home Assistant object."""
|
||||
super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
|
||||
self.coresys: CoreSys = coresys
|
||||
self.instance: DockerHomeAssistant = DockerHomeAssistant(coresys)
|
||||
self.lock: asyncio.Lock = asyncio.Lock(loop=coresys.loop)
|
||||
self._error_state: bool = False
|
||||
|
||||
# We don't persist access tokens. Instead we fetch new ones when needed
|
||||
self.access_token: Optional[str] = None
|
||||
self._access_token_expires: Optional[datetime] = None
|
||||
|
||||
async def load(self) -> None:
|
||||
"""Prepare Home Assistant object."""
|
||||
try:
|
||||
# Evaluate Version if we lost this information
|
||||
if not self.version:
|
||||
self.version = await self.instance.get_latest_version(key=StrictVersion)
|
||||
|
||||
await self.instance.attach(tag=self.version)
|
||||
except DockerAPIError:
|
||||
_LOGGER.info("No Home Assistant Docker image %s found.", self.image)
|
||||
await self.install_landingpage()
|
||||
else:
|
||||
self.version = self.instance.version
|
||||
self.save_data()
|
||||
|
||||
@property
|
||||
def machine(self) -> str:
|
||||
"""Return the system machines."""
|
||||
return self.instance.machine
|
||||
|
||||
@property
|
||||
def arch(self) -> str:
|
||||
"""Return arch of running Home Assistant."""
|
||||
return self.instance.arch
|
||||
|
||||
@property
|
||||
def error_state(self) -> bool:
|
||||
"""Return True if system is in error."""
|
||||
return self._error_state
|
||||
|
||||
@property
|
||||
def ip_address(self) -> IPv4Address:
|
||||
"""Return IP of Home Assistant instance."""
|
||||
return self.instance.ip_address
|
||||
|
||||
@property
|
||||
def api_port(self) -> int:
|
||||
"""Return network port to Home Assistant instance."""
|
||||
return self._data[ATTR_PORT]
|
||||
|
||||
@api_port.setter
|
||||
def api_port(self, value: int) -> None:
|
||||
"""Set network port for Home Assistant instance."""
|
||||
self._data[ATTR_PORT] = value
|
||||
|
||||
@property
|
||||
def api_password(self) -> str:
|
||||
"""Return password for Home Assistant instance."""
|
||||
return self._data.get(ATTR_PASSWORD)
|
||||
|
||||
@api_password.setter
|
||||
def api_password(self, value: str):
|
||||
"""Set password for Home Assistant instance."""
|
||||
self._data[ATTR_PASSWORD] = value
|
||||
|
||||
@property
|
||||
def api_ssl(self) -> bool:
|
||||
"""Return if we need ssl to Home Assistant instance."""
|
||||
return self._data[ATTR_SSL]
|
||||
|
||||
@api_ssl.setter
|
||||
def api_ssl(self, value: bool):
|
||||
"""Set SSL for Home Assistant instance."""
|
||||
self._data[ATTR_SSL] = value
|
||||
|
||||
@property
|
||||
def api_url(self) -> str:
|
||||
"""Return API url to Home Assistant."""
|
||||
return "{}://{}:{}".format(
|
||||
"https" if self.api_ssl else "http", self.ip_address, self.api_port
|
||||
)
|
||||
|
||||
@property
|
||||
def watchdog(self) -> bool:
|
||||
"""Return True if the watchdog should protect Home Assistant."""
|
||||
return self._data[ATTR_WATCHDOG]
|
||||
|
||||
@watchdog.setter
|
||||
def watchdog(self, value: bool):
|
||||
"""Return True if the watchdog should protect Home Assistant."""
|
||||
self._data[ATTR_WATCHDOG] = value
|
||||
|
||||
@property
|
||||
def wait_boot(self) -> int:
|
||||
"""Return time to wait for Home Assistant startup."""
|
||||
return self._data[ATTR_WAIT_BOOT]
|
||||
|
||||
@wait_boot.setter
|
||||
def wait_boot(self, value: int):
|
||||
"""Set time to wait for Home Assistant startup."""
|
||||
self._data[ATTR_WAIT_BOOT] = value
|
||||
|
||||
@property
|
||||
def latest_version(self) -> str:
|
||||
"""Return last available version of Home Assistant."""
|
||||
if self.is_custom_image:
|
||||
return self._data.get(ATTR_LAST_VERSION)
|
||||
return self.sys_updater.version_homeassistant
|
||||
|
||||
@latest_version.setter
|
||||
def latest_version(self, value: str):
|
||||
"""Set last available version of Home Assistant."""
|
||||
if value:
|
||||
self._data[ATTR_LAST_VERSION] = value
|
||||
else:
|
||||
self._data.pop(ATTR_LAST_VERSION, None)
|
||||
|
||||
@property
|
||||
def image(self) -> str:
|
||||
"""Return image name of the Home Assistant container."""
|
||||
if self._data.get(ATTR_IMAGE):
|
||||
return self._data[ATTR_IMAGE]
|
||||
return os.environ["HOMEASSISTANT_REPOSITORY"]
|
||||
|
||||
@image.setter
|
||||
def image(self, value: str):
|
||||
"""Set image name of Home Assistant container."""
|
||||
if value:
|
||||
self._data[ATTR_IMAGE] = value
|
||||
else:
|
||||
self._data.pop(ATTR_IMAGE, None)
|
||||
|
||||
@property
|
||||
def is_custom_image(self) -> bool:
|
||||
"""Return True if a custom image is used."""
|
||||
return all(attr in self._data for attr in (ATTR_IMAGE, ATTR_LAST_VERSION))
|
||||
|
||||
@property
|
||||
def version(self) -> Optional[str]:
|
||||
"""Return version of local version."""
|
||||
return self._data.get(ATTR_VERSION)
|
||||
|
||||
@version.setter
|
||||
def version(self, value: str) -> None:
|
||||
"""Set installed version."""
|
||||
self._data[ATTR_VERSION] = value
|
||||
|
||||
@property
|
||||
def boot(self) -> bool:
|
||||
"""Return True if Home Assistant boot is enabled."""
|
||||
return self._data[ATTR_BOOT]
|
||||
|
||||
@boot.setter
|
||||
def boot(self, value: bool):
|
||||
"""Set Home Assistant boot options."""
|
||||
self._data[ATTR_BOOT] = value
|
||||
|
||||
@property
|
||||
def uuid(self) -> UUID:
|
||||
"""Return a UUID of this Home Assistant instance."""
|
||||
return self._data[ATTR_UUID]
|
||||
|
||||
@property
|
||||
def hassio_token(self) -> str:
|
||||
"""Return an access token for the Hass.io API."""
|
||||
return self._data.get(ATTR_ACCESS_TOKEN)
|
||||
|
||||
@property
|
||||
def refresh_token(self) -> str:
|
||||
"""Return the refresh token to authenticate with Home Assistant."""
|
||||
return self._data.get(ATTR_REFRESH_TOKEN)
|
||||
|
||||
@refresh_token.setter
|
||||
def refresh_token(self, value: str):
|
||||
"""Set Home Assistant refresh_token."""
|
||||
self._data[ATTR_REFRESH_TOKEN] = value
|
||||
|
||||
@process_lock
|
||||
async def install_landingpage(self) -> None:
|
||||
"""Install a landing page."""
|
||||
_LOGGER.info("Setup HomeAssistant landingpage")
|
||||
while True:
|
||||
try:
|
||||
await self.instance.install("landingpage")
|
||||
except DockerAPIError:
|
||||
_LOGGER.warning("Fails install landingpage, retry after 30sec")
|
||||
await asyncio.sleep(30)
|
||||
else:
|
||||
break
|
||||
|
||||
self.version = self.instance.version
|
||||
self.save_data()
|
||||
|
||||
@process_lock
|
||||
async def install(self) -> None:
|
||||
"""Install a landing page."""
|
||||
_LOGGER.info("Setup Home Assistant")
|
||||
while True:
|
||||
# read homeassistant tag and install it
|
||||
if not self.latest_version:
|
||||
await self.sys_updater.reload()
|
||||
|
||||
tag = self.latest_version
|
||||
if tag:
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.update(tag)
|
||||
break
|
||||
_LOGGER.warning("Error on install Home Assistant. Retry in 30sec")
|
||||
await asyncio.sleep(30)
|
||||
|
||||
_LOGGER.info("Home Assistant docker now installed")
|
||||
self.version = self.instance.version
|
||||
self.save_data()
|
||||
|
||||
# finishing
|
||||
try:
|
||||
_LOGGER.info("Start Home Assistant")
|
||||
await self._start()
|
||||
except HomeAssistantError:
|
||||
_LOGGER.error("Can't start Home Assistant!")
|
||||
|
||||
# Cleanup
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.cleanup()
|
||||
|
||||
@process_lock
|
||||
async def update(self, version: Optional[str] = None) -> None:
|
||||
"""Update HomeAssistant version."""
|
||||
version = version or self.latest_version
|
||||
rollback = self.version if not self.error_state else None
|
||||
running = await self.instance.is_running()
|
||||
exists = await self.instance.exists()
|
||||
|
||||
if exists and version == self.instance.version:
|
||||
_LOGGER.warning("Version %s is already installed", version)
|
||||
return
|
||||
|
||||
# process an update
|
||||
async def _update(to_version: str) -> None:
|
||||
"""Run Home Assistant update."""
|
||||
_LOGGER.info("Update Home Assistant to version %s", to_version)
|
||||
try:
|
||||
await self.instance.update(to_version)
|
||||
except DockerAPIError:
|
||||
_LOGGER.warning("Update Home Assistant image fails")
|
||||
raise HomeAssistantUpdateError() from None
|
||||
else:
|
||||
self.version = self.instance.version
|
||||
|
||||
if running:
|
||||
await self._start()
|
||||
|
||||
_LOGGER.info("Successful run Home Assistant %s", to_version)
|
||||
self.save_data()
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.cleanup()
|
||||
|
||||
# Update Home Assistant
|
||||
with suppress(HomeAssistantError):
|
||||
await _update(version)
|
||||
return
|
||||
|
||||
# Update going wrong, revert it
|
||||
if self.error_state and rollback:
|
||||
_LOGGER.fatal("HomeAssistant update fails -> rollback!")
|
||||
await _update(rollback)
|
||||
else:
|
||||
raise HomeAssistantUpdateError()
|
||||
|
||||
async def _start(self) -> None:
|
||||
"""Start Home Assistant Docker & wait."""
|
||||
if await self.instance.is_running():
|
||||
_LOGGER.warning("Home Assistant is already running!")
|
||||
return
|
||||
|
||||
# Create new API token
|
||||
self._data[ATTR_ACCESS_TOKEN] = secrets.token_hex(56)
|
||||
self.save_data()
|
||||
|
||||
try:
|
||||
await self.instance.run()
|
||||
except DockerAPIError:
|
||||
raise HomeAssistantError() from None
|
||||
await self._block_till_run()
|
||||
|
||||
@process_lock
|
||||
async def start(self) -> None:
|
||||
"""Run Home Assistant docker."""
|
||||
try:
|
||||
if await self.instance.is_running():
|
||||
await self.instance.restart()
|
||||
elif await self.instance.is_initialize():
|
||||
await self.instance.start()
|
||||
else:
|
||||
await self._start()
|
||||
return
|
||||
|
||||
await self._block_till_run()
|
||||
except DockerAPIError:
|
||||
raise HomeAssistantError() from None
|
||||
|
||||
@process_lock
|
||||
async def stop(self) -> None:
|
||||
"""Stop Home Assistant Docker.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
try:
|
||||
return await self.instance.stop(remove_container=False)
|
||||
except DockerAPIError:
|
||||
raise HomeAssistantError() from None
|
||||
|
||||
@process_lock
|
||||
async def restart(self) -> None:
|
||||
"""Restart Home Assistant Docker."""
|
||||
try:
|
||||
await self.instance.restart()
|
||||
except DockerAPIError:
|
||||
raise HomeAssistantError() from None
|
||||
|
||||
await self._block_till_run()
|
||||
|
||||
@process_lock
|
||||
async def rebuild(self) -> None:
|
||||
"""Rebuild Home Assistant Docker container."""
|
||||
with suppress(DockerAPIError):
|
||||
await self.instance.stop()
|
||||
await self._start()
|
||||
|
||||
def logs(self) -> Awaitable[bytes]:
|
||||
"""Get HomeAssistant docker logs.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.logs()
|
||||
|
||||
async def stats(self) -> DockerStats:
|
||||
"""Return stats of Home Assistant.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
try:
|
||||
return await self.instance.stats()
|
||||
except DockerAPIError:
|
||||
raise HomeAssistantError() from None
|
||||
|
||||
def is_running(self) -> Awaitable[bool]:
|
||||
"""Return True if Docker container is running.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.is_running()
|
||||
|
||||
def is_fails(self) -> Awaitable[bool]:
|
||||
"""Return True if a Docker container is fails state.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.is_fails()
|
||||
|
||||
@property
|
||||
def in_progress(self) -> bool:
|
||||
"""Return True if a task is in progress."""
|
||||
return self.instance.in_progress or self.lock.locked()
|
||||
|
||||
    async def check_config(self) -> ConfigResult:
        """Run Home Assistant config check."""
        result = await self.instance.execute_command(
            "python3 -m homeassistant -c /config --script check_config"
        )

        # The check could not be executed at all
        if result.exit_code is None:
            _LOGGER.error("Fatal error on config check!")
            raise HomeAssistantError()

        # Parse output
        log = convert_to_ascii(result.output)
        if result.exit_code != 0 or RE_YAML_ERROR.search(log):
            _LOGGER.error("Invalid Home Assistant config found!")
            return ConfigResult(False, log)

        _LOGGER.info("Home Assistant config is valid")
        return ConfigResult(True, log)

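A hedged usage sketch: assuming ConfigResult carries the fields `valid` and `log` (consistent with the constructor calls above), a caller could surface the result like this:

async def report_config(homeassistant) -> None:
    # Illustrative only: run the config check and print the outcome.
    result = await homeassistant.check_config()
    if result.valid:
        print("Home Assistant configuration is valid")
    else:
        print(f"Configuration check failed:\n{result.log}")
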
    async def ensure_access_token(self) -> None:
        """Ensure there is a valid access token."""
        if (
            self.access_token is not None
            and self._access_token_expires > datetime.utcnow()
        ):
            return

        with suppress(asyncio.TimeoutError, aiohttp.ClientError):
            async with self.sys_websession_ssl.post(
                f"{self.api_url}/auth/token",
                timeout=30,
                data={
                    "grant_type": "refresh_token",
                    "refresh_token": self.refresh_token,
                },
            ) as resp:
                if resp.status != 200:
                    _LOGGER.error("Can't update Home Assistant access token!")
                    raise HomeAssistantAuthError()

                _LOGGER.info("Updated Home Assistant API token")
                tokens = await resp.json()
                self.access_token = tokens["access_token"]
                self._access_token_expires = datetime.utcnow() + timedelta(
                    seconds=tokens["expires_in"]
                )

    @asynccontextmanager
    async def make_request(
        self,
        method: str,
        path: str,
        json: Optional[Dict[str, Any]] = None,
        content_type: Optional[str] = None,
        data: Optional[bytes] = None,
        timeout: int = 30,
        params: Optional[Dict[str, str]] = None,
    ) -> AsyncContextManager[aiohttp.ClientResponse]:
        """Async context manager to make a request with the right auth."""
        url = f"{self.api_url}/{path}"
        headers = {}

        # Pass through the content type
        if content_type is not None:
            headers[hdrs.CONTENT_TYPE] = content_type

        # Set old API password
        if not self.refresh_token and self.api_password:
            headers[HEADER_HA_ACCESS] = self.api_password

        for _ in (1, 2):
            # Prepare access token
            if self.refresh_token:
                await self.ensure_access_token()
                headers[hdrs.AUTHORIZATION] = f"Bearer {self.access_token}"

            try:
                async with getattr(self.sys_websession_ssl, method)(
                    url,
                    data=data,
                    timeout=timeout,
                    json=json,
                    headers=headers,
                    params=params,
                ) as resp:
                    # Access token expired -> refresh and retry once
                    if resp.status == 401 and self.refresh_token:
                        self.access_token = None
                        continue
                    yield resp
                    return
            except (asyncio.TimeoutError, aiohttp.ClientError) as err:
                _LOGGER.error("Error on call %s: %s", url, err)
                break

        raise HomeAssistantAPIError()

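Because make_request is decorated with @asynccontextmanager, callers consume it with `async with`. A minimal, hypothetical sketch; the caller function is illustrative, and `api/states` is assumed here only as a standard Home Assistant REST endpoint:

async def fetch_states(homeassistant):
    # Illustrative only: read /api/states through the authenticated session.
    async with homeassistant.make_request("get", "api/states") as resp:
        if resp.status == 200:
            return await resp.json()
        return None
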
    async def check_api_state(self) -> bool:
        """Return True if Home Assistant is up and running."""
        with suppress(HomeAssistantAPIError):
            async with self.make_request("get", "api/") as resp:
                if resp.status in (200, 201):
                    return True
                status = resp.status
            _LOGGER.warning("Home Assistant API config mismatch: %s", status)

        return False

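check_api_state is the kind of probe a supervising task can poll periodically. A hypothetical watchdog sketch, not part of this module (the loop, its interval, and the restart policy are all assumptions; asyncio is already imported above):

async def watchdog(homeassistant, interval: int = 60) -> None:
    # Illustrative only: restart Home Assistant when the container runs
    # but the API has stopped answering.
    while True:
        await asyncio.sleep(interval)
        if not await homeassistant.is_running():
            continue
        if not await homeassistant.check_api_state():
            await homeassistant.restart()
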
    async def _block_till_run(self) -> None:
        """Block until Home Assistant is booted up or the startup timeout is reached."""
        start_time = time.monotonic()

        # Database migration
        migration_progress = False
        migration_file = Path(self.sys_config.path_homeassistant, ".migration_progress")

        # PIP installation
        pip_progress = False
        pip_file = Path(self.sys_config.path_homeassistant, ".pip_progress")

        while True:
            await asyncio.sleep(5)

            # 1: Check if the container is still running
            if not await self.instance.is_running():
                _LOGGER.error("Home Assistant has crashed!")
                break

            # 2: Check if the API responds
            if await self.sys_run_in_executor(
                check_port, self.ip_address, self.api_port
            ):
                _LOGGER.info("Detected a running Home Assistant instance")
                self._error_state = False
                return

            # 3: Running database migration
            if migration_file.exists():
                if not migration_progress:
                    migration_progress = True
                    _LOGGER.info("Home Assistant database migration in progress")
                continue
            elif migration_progress:
                migration_progress = False  # Reset start time
                start_time = time.monotonic()
                _LOGGER.info("Home Assistant database migration done")

            # 4: Running PIP installation
            if pip_file.exists():
                if not pip_progress:
                    pip_progress = True
                    _LOGGER.info("Home Assistant pip installation in progress")
                continue
            elif pip_progress:
                pip_progress = False  # Reset start time
                start_time = time.monotonic()
                _LOGGER.info("Home Assistant pip installation done")

            # 5: Timeout
            if time.monotonic() - start_time > self.wait_boot:
                _LOGGER.warning("Don't wait anymore for Home Assistant startup!")
                break

        self._error_state = True
        raise HomeAssistantError()

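check_port, imported from this package's utilities, is a small synchronous probe that _block_till_run runs in an executor. A minimal sketch of such a helper, under the assumption that it returns True once the TCP port accepts a connection (the timeout value here is illustrative):

import socket

def check_port(address, port) -> bool:
    # Sketch: report whether a TCP connection to address:port succeeds.
    try:
        with socket.create_connection((str(address), port), timeout=5):
            return True
    except OSError:
        return False
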
    async def repair(self):
        """Repair local Home Assistant data."""
        if await self.instance.exists():
            return

        _LOGGER.info("Repair Home Assistant %s", self.version)
        try:
            await self.instance.install(self.version)
        except DockerAPIError:
            _LOGGER.error("Repairing Home Assistant failed")
@@ -1,93 +0,0 @@
"""Host functions like audio, D-Bus or systemd."""
from contextlib import suppress
import logging

from .alsa import AlsaAudio
from .apparmor import AppArmorControl
from .control import SystemControl
from .info import InfoCenter
from .services import ServiceManager
from ..const import (
    FEATURES_REBOOT,
    FEATURES_SHUTDOWN,
    FEATURES_HOSTNAME,
    FEATURES_SERVICES,
    FEATURES_HASSOS,
)
from ..coresys import CoreSysAttributes
from ..exceptions import HassioError

_LOGGER = logging.getLogger(__name__)


class HostManager(CoreSysAttributes):
    """Manage supported functions from the host."""

    def __init__(self, coresys):
        """Initialize host manager."""
        self.coresys = coresys
        self._alsa = AlsaAudio(coresys)
        self._apparmor = AppArmorControl(coresys)
        self._control = SystemControl(coresys)
        self._info = InfoCenter(coresys)
        self._services = ServiceManager(coresys)

    @property
    def alsa(self):
        """Return host ALSA handler."""
        return self._alsa

    @property
    def apparmor(self):
        """Return host AppArmor handler."""
        return self._apparmor

    @property
    def control(self):
        """Return host control handler."""
        return self._control

    @property
    def info(self):
        """Return host info handler."""
        return self._info

    @property
    def services(self):
        """Return host services handler."""
        return self._services

    @property
    def supported_features(self):
        """Return a list of supported host features."""
        features = []

        if self.sys_dbus.systemd.is_connected:
            features.extend([FEATURES_REBOOT, FEATURES_SHUTDOWN, FEATURES_SERVICES])

        if self.sys_dbus.hostname.is_connected:
            features.append(FEATURES_HOSTNAME)

        if self.sys_hassos.available:
            features.append(FEATURES_HASSOS)

        return features

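A hypothetical caller could gate host operations on the advertised features. This sketch assumes SystemControl exposes a reboot() coroutine behind the `control` handler; the caller function itself is illustrative:

async def safe_reboot(host_manager) -> None:
    # Illustrative only: reboot the host when systemd is reachable over D-Bus.
    if FEATURES_REBOOT in host_manager.supported_features:
        await host_manager.control.reboot()
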
    async def reload(self):
        """Reload host functions."""
        if self.sys_dbus.hostname.is_connected:
            await self.info.update()

        if self.sys_dbus.systemd.is_connected:
            await self.services.update()

    async def load(self):
        """Load host information."""
        with suppress(HassioError):
            await self.reload()

        # Load AppArmor profile data
        try:
            await self.apparmor.load()
        except HassioError as err:
            _LOGGER.warning("Loading host AppArmor on startup failed: %s", err)