Mirror of https://github.com/home-assistant/supervisor.git (synced 2025-08-11 18:19:21 +00:00)

Compare commits (840 commits)
Commit SHA1s:

598108d294 a0261dbbcc 2418122b46 f104e60afa ed45f27f3e 40aa5c9caf
14b1ea4eb0 5052a339e3 2321890dde 4cb5770ee0 3a35561d1d 6fbec53f8a
c707934018 efd8efa248 979861b764 cdc53a159c a203ed9cc5 5cab5f0c08
25ea80e169 f43b4e9e24 160fbb2589 c85aa664e1 51dcbf5db7 fa114a4a03
d7fd58bdb9 38b0aea8e2 41eade9325 e64cf41aec 02872b5e75 e4d49bb459
d38b7d5a82 537c5d3197 575df2fcf6 c08c3c6b37 2acf28609e bb59d0431e
1c7b1f1462 f32d17d924 928a4d8dce dd3ba93308 7e1b179cdd a9a2c35f06
58b88a6919 f937876a1b 8193f43634 1d3f880f82 ef2fa8d2e2 51997b3e7c
98785b00e2 8d3694884d a2821a98ad 8d552ae15c 6db4c60f47 805c0385a0
cea6e7a9f2 127073c01b 30fe36ae05 58bd677832 1a3b369dd7 6e38216abd
efcfc1f841 8dea50ce83 7a5a01bdcc bd1450a682 c538c1ce7f b6d59c4f64
a758ccaf5c e8b04cc20a 9bcb15dbc0 1e953167b6 979586cdb2 cd31fad56d
ff57d88e2a 06cb5e171e a8b70a2e13 948019ccee 89ed109505 fae246c503
2411b4287d b3308ecbe0 3541cbff5e 838ba7ff36 e9802f92c9 016fd24859
d315e81ab2 97c38b8534 011e2b3df5 e3ee9a299f d73c10f874 9e448b46ba
9f09c46789 fe6634551a 22a7931a7c 94f112512f b6509dca1f 620234e708
d50e866cec 76ad6dca02 cdb1520a63 bbef706a33 835509901f b51f9586c4
fc83cb9559 f5f5f829ac 930eed4500 01a8b58054 eba1d01fc2 84755836c9
c9585033cb 2d312c276f 3b0d0e9928 8307b153e3 dfaffe3ec5 8d7b15cbeb
00969a67ac a374d4e817 f5dda39f63 fb5d54d5fe d392b35fdd 3ceec006ac
62a574c6bd 821c10b2bd fa3269a098 a9bdab4b49 0df5b7d87b 4861fc70ce
47c443bb92 9cb4b49597 865523fd37 1df35a6fe1 e70c9d8a30 7d6b00ea4a
e5fc985915 71ccaa2bd0 e127f23a08 495f9f2373 27274286db 85ba886029
2f3a868e42 a51b80f456 f27a426879 19ca485c28 7deed55c2d 4c5c6f072c
f174e08ad6 2658f95347 311c981d1a d6d3bf0583 a1a601a4d3 14776eae76
bef4034ab8 ad988f2a24 6599ae0ee0 4f1ed690cd 4ffaee6013 e1ce19547e
039040b972 7a1af3d346 1e98774b62 4b4d6c6866 65ff83d359 e509c804ae
992827e225 083e97add8 05378d18c0 3dd465acc9 8f6e36f781 85fe56db57
8e07429e47 ced6d702b9 25d7de4dfa 82754c0dfe e604b022ee 6b29022822
2e671cc5ee f25692b98c c4a011b261 a935bac20b 0a3a98cb42 adb39ca93f
5fdc340e58 bb64dca6e6 685788bcdf e949aa35f3 fc80bf0df4 bd9740e866
3a260a8fd9 c87e6a5a42 8bc3319523 bdfcf1a2df 7f4284f2af fd69120aa6
5df60b17e8 cb835b5ae6 9eab92513a 29e8f50ab8 aa0496b236 06e9cec21a
0fe27088df 54d226116d 4b37e30680 7c5f710deb 5a3ebaf683 233da0e48f
96380d8d28 c84a0edf20 a3cf445c93 3f31979f66 44416edfd2 351c45da75
e27c5dad15 dc510f22ac 1b78011f8b a908828bf4 55b7eb62f6 10e8fcf3b9
f1b0c05447 de22bd688e 9fe35b4fb5 f13d08d37a a0ecb46584 0c57df0c8e
9c902c5c69 af412c3105 ec43448163 9f7e0ecd55 e50515a17c 7c345db6fe
51c2268c1e 51feca05a5 3889504292 7bd6ff374a 44fa34203a ff351c7f6d
960b00d85a 18e3eacd7f f4a1da33c4 49de5be44e 383657e8ce 3af970ead6
6caec79958 33bbd92d9b 9dba78fbcd 630d85ec78 f0d46e8671 db0593f0b2
1d83c0c77a 5e5fd3a79b c61995aab8 37c393f857 8e043a01c1 c7b6b2ddb3
522f68bf68 7d4866234f 7aa5bcfc7c 04b59f0896 796f9a203e 22c8cda0d7
1cf534ccc5 6d8c821148 264e9665b0 53fa8e48c0 e406aa4144 4953ba5077
0a97ac0578 56af4752f4 81413d08ed 2bc2a476d9 4d070a65c6 6185fbaf26
698a126b93 acf921f55d f5a78c88f8 206ece1575 a8028dbe10 c605af6ccc
b7b8e6c40e 3fcb1de419 12034fe5fc 56959d781a 9a2f025646 12cc163058
74971d9753 a9157e3a9f b96697b708 81e6896391 2dcaa3608d e21671ec5e
7841f14163 cc9f594ab4 ebfaaeaa6b ffa91e150d 06fa9f9a9e 9f203c42ec
5d0d34a4af c2cfc0d3d4 0f4810d41f 175848f2a8 472bd66f4d 168ea32d2c
e82d6b1ea4 6c60ca088c 83e8f935fd 71867302a4 8bcc402c5f 72b7d2a123
20c1183450 0bbfbd2544 350bd9c32f dcca8b0a9a f77b479e45 216565affb
6f235c2a11 27a770bd1d ef15b67571 6aad966c52 9811f11859 13148ec7fb
b2d7464790 ce84e185ad c3f5ee43b6 e2dc1a4471 e787e59b49 f0ed2eba2b
2364e1e652 cc56944d75 69cea9fc96 fcebc9d1ed 9350e4f961 387e0ad03e
61fec8b290 1228baebf4 a30063e85c 524cebac4d c94114a566 b6ec7a9e64
69be7a6d22 58155c35f9 7b2377291f 657ee84e39 2e4b545265 2de1d35dd1
2b082b362d dfdd0d6b4b a00e81c03f 776e6bb418 b31fca656e fa783a0d2c
96c0fbaf10 24f7801ddc 8e83e007e9 d0db466e67 3010bd4eb6 069bed8815
d2088ae5f8 0ca5a241bb dff32a8e84 4a20344652 98b969ef06 c8cb8aecf7
73e8875018 02aed9c084 89148f8fff 6bde527f5c d62aabc01b 82299a3799
c02f30dd7e e91983adb4 ff88359429 5a60d5cbe8 2b41ffe019 1c23e26f93
3d555f951d 6d39b4d7cd 4fe5d09f01 e52af3bfb4 0467b33cd5 14167f6e13
7a1aba6f81 920f7f2ece 06fadbd70f d4f486864f d3a21303d9 e1cbfdd84b
87170a4497 ae6f8bd345 b9496e0972 c36a6dcd65 19ca836b78 8a6ea7ab50
6721b8f265 9393521f98 398b24e0ab 374bcf8073 7e3859e2f5 490ec0d462
15bf1ee50e 6376d92a0d 10230b0b4c 2495cda5ec ae8ddca040 0212d027fb
a3096153ab 7434ca9e99 4ac7f7dcf0 e9f5b13aa5 1fbb6d46ea 8dbfea75b1
3b3840c087 a21353909d 5497ed885a 39baea759a 80ddb1d262 e24987a610
9e5c276e3b c33d31996d aa1f08fe8a d78689554a 5bee1d851c ddb8eef4d1
da513e7347 4279d7fd16 934eab2e8c 2a31edc768 fcdd66dc6e a65d3222b9
36179596a0 c083c850c1 ff903d7b5a dd603e1ec2 a2f06b1553 8115d2b3d3
4f97bb9e0b 84d24a2c4d b709061656 cd9034b3f1 25d324c73a 3a834d1a73
e9fecb817d 56e70d7ec4 2e73a85aa9 1e119e9c03 6f6e5c97df 6ef99974cf
8984b9aef6 63e08b15bc 319b2b5d4c bae7bb8ce4 0b44df366c f253c797af
0a8b1c2797 3b45fb417b 2a2d92e3c5 a320e42ed5 fdef712e01 5717ac19d7
33d7d76fee 73bdaa623c 8ca8f59a0b 745af3c039 5d17e1011a 826464c41b
a643df8cac 24ded99286 6646eee504 f55c10914e b1e768f69e 4702f8bd5e
69959b2c97 9d6f4f5392 36b9a609bf 36ae0c82b6 e11011ee51 9125211a57
3a4ef6ceb3 ca82993278 0925af91e3 80bc32243c f0d232880d 7c790dbbd9
899b17e992 d1b4521290 9bb4feef29 4bcdc98a31 26f8c1df92 a481ad73f3
e4ac17fea6 bcd940e95b 5365aa4466 a0d106529c bf1a9ec42d fc5d97562f
f5c171e44f a3c3f15806 ef58a219ec 6708fe36e3 e02fa2824c a20f927082
6d71e3fe81 4056fcd75d 1e723cf0e3 ce3f670597 ce3d3d58ec a92cab48e0
ee76317392 380ca13be1 93f4c5e207 e438858da0 428a4dd849 39cc8aaa13
39a62864de 71a162a871 05d7eff09a 7b8ad0782d df3e9e3a5e 8cdc769ec8
76e1304241 eb9b1ff03d b3b12d35fd 74485262e7 615e68b29b 927b4695c9
11811701d0 05c8022db3 a9ebb147c5 ba8ca4d9ee 3574df1385 b4497d231b
5aa9b0245a 4c72c3aafc bf4f40f991 603334f4f3 46548af165 8ef32b40c8
fb25377087 a75fd2d07e e30f39e97e 4818ad7465 5e4e9740c7 d4e41dbf80
cea1a1a15f c2700b14dc 07d27170db 8eb8c07df6 7bee6f884c 78dd20e314
2a011b6448 5c90370ec8 120465b88d c77292439a 0a0209f81a 69a7ed8a5c
8df35ab488 a12567d0a8 64fe190119 e3ede66943 2672b800d4 c60d4bda92
db9d0f2639 02d4045ec3 a308ea6927 edc5e5e812 23b65cb479 e5eabd2143
b0dd043975 435a1096ed 21a9084ca0 10d9135d86 272d8b29f3 3d665b9eec
c563f484c9 38268ea4ea c1ad64cddf b898cd2a3a 937b31d845 e4e655493b
387d2dcc2e 8abe33d48a 860442d5c4 ce5183ce16 3e69b04b86 8b9cd4f122
c0e3ccdb83 e8cc85c487 b3eff41692 1ea63f185c a513d5c09a fb8216c102
4f381d01df de3382226e 77be830b72 09c0e1320f cc4ee59542 1f448744f3
ee2c257057 be8439d4ac 981f2b193c 39087e09ce 59960efb9c 5a53bb5981
a67fe69cbb 9ce2b0765f 2e53a48504 8e4db0c3ec 4072b06faf a2cf7ece70
734fe3afde 7f3bc91c1d 9c2c95757d b5ed6c586a 35033d1f76 9e41d0c5b0
62e92fada9 ae0a1a657f 81e511ba8e d89cb91c8c dc31b6e6fe 930a32de1a
e40f2ed8e3 abbd3d1078 63c9948456 b6c81d779a 2480c83169 334cc66cf6
3cf189ad94 6ffb94a0f5 3593826441 0a0a62f238 41ce9913d2 b77c42384d
138bb12f98 4fe2859f4e 0768b2b4bc e6f1772a93 5374b2b3b9 1196788856
9f3f47eb80 1a90a478f2 ee773f3b63 5ffc27f60c 4c13dfb43c bc099f0d81
b26dd0af19 0dee5bd763 0765387ad8 a07517bd3c e5f0d80d96 2fc5e3b7d9
778bc46848 882586b246 b7c07a2555 814b504fa9 7ae430e7a8 0e7e95ba20
e577d8acb2 0a76ab5054 03c5596e04 3af4e14e83 7c8cf57820 8d84a8a62e
08c45060bd 7ca8d2811b bb6898b032 cd86c6814e b67e116650 57ce411fb6
85ed4d9e8d ccb39da569 dd7ba64d32 de3edb1654 d262151727 a37c90af96
0a3a752b4c 0a34f427f8 157740e374 b0e994f3f5 f374852801 709f034f2e
6d6deb8c66 5771b417bc 51efcefdab d31ab5139d ce18183daa b8b73cf880
5291e6c1f3 626a9f06c4 72338eb5b8 7bd77c6e99 69151b962a 86305d4fe4
d5c3850a3f 3e645b6175 89dc78bc05 164c403d05 5e8007453f 0a0d97b084
eb604ed92d c47828dbaa ea437dc745 c16a208b39 55d803b2a0 611f6f2829
b94df76731 218619e7f0 273eed901a 8ea712a937 658449a7a0 968c471591
b4665f3907 496cee1ec4 0f8c80f3ba 6c28f82239 def32abb57 f57a241b9e
11a7e8b15d fa4f7697b7 6098b7de8e 0a382ce54d dd53aaa30c 31e175a15a
4c80727bcc b2c3157361 dc4f38ebd0 7c9437c6ee 9ce9e10dfd 4e94043bca
749d45bf13 ce99b3e259 2c84daefab dc1933fa88 6970cebf80 a234006de2
2484149323 778148424c 55f4a2395e 5a45d47ed8 da601d1483 e98a1272e9
90e9cf788b ec387c3010 7e5a960c98 f1bcbf2416 bce144e197 86a3735d83
decf254e5f e10fe16f21 996891a740 7385d026ea 09f43d6f3c 6906e757dd
963d242afa 3ed7cbe2ed 0da924f10b 76411da0a7 ce87a72cf0 f8c9e2f295
00af027e51 c91fce3281 fb6df18ce9 31f5c6f938 d3a44b2992 b537a03e6d
46093379e4 1b17d90504 7d42dd7ac2 f35dcfcfd3 c4f223c38a 71362f2c76
96beac9fd9 608c0e5076 16ef6d82d2 51940222be 21f3c4820b 214c6f919e
d9d438d571 cf60d1f55c f9aa12cbad 76266cc18b 50b9506ff3 754cd64213
113b62ee77 d9874c4c3e ca44e858c5 c7ca4de307 b77146a4e0 45b4800378
7f9232d2b9 d90426f745 c2deabb672 ead5993f3e 1bcd74e8fa 118da3c275
d7bb9013d4 812c46d82b c0462b28cd 82b2f66920 01da42e1b6 d652d22547
baea84abe6 c2d705a42a f10b433e1f 67f562a846 1edec61133 c13a33bf71
2ae93ae7b1 8451020afe a48e568efc dee2808cb5 06a2ab26a2 45de0f2f39
bac5f704dc 79669a5d04 a6e712c9ea 069fe99699 4754f067ad dce9818812
d054b6dbb7 3093165325 fd9c5bd412 9a8850fecd b12175ab9a b52f90187b
4eb02f474d dfdcddfd0b 0391277bad 73643b9bfe 93a52b8382 7a91bb1f6c
26efa998a1 fc9f3fee0a ec19bd570b 3335bad9e1 71ae334e24 0807651fbd
7026d42d77 31047b9ec2 714791de8f c544fff2b2 fc45670686 5cefa0a2ee
.devcontainer/Dockerfile (new file, 49 lines)
@@ -0,0 +1,49 @@
FROM mcr.microsoft.com/vscode/devcontainers/python:0-3.8

WORKDIR /workspaces

# Install Node/Yarn for Frontend
RUN apt-get update && apt-get install -y --no-install-recommends \
        curl \
        git \
        apt-utils \
        apt-transport-https \
    && curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
    && echo "deb https://dl.yarnpkg.com/debian/ stable main" | tee /etc/apt/sources.list.d/yarn.list \
    && apt-get update && apt-get install -y --no-install-recommends \
        nodejs \
        yarn \
    && curl -o - https://raw.githubusercontent.com/nvm-sh/nvm/v0.35.3/install.sh | bash \
    && rm -rf /var/lib/apt/lists/*
ENV NVM_DIR /root/.nvm

# Install docker
# https://docs.docker.com/engine/installation/linux/docker-ce/ubuntu/
RUN apt-get update && apt-get install -y --no-install-recommends \
        apt-transport-https \
        ca-certificates \
        curl \
        software-properties-common \
        gpg-agent \
    && curl -fsSL https://download.docker.com/linux/debian/gpg | apt-key add - \
    && add-apt-repository "deb https://download.docker.com/linux/debian $(lsb_release -cs) stable" \
    && apt-get update && apt-get install -y --no-install-recommends \
        docker-ce \
        docker-ce-cli \
        containerd.io \
    && rm -rf /var/lib/apt/lists/*

# Install tools
RUN apt-get update && apt-get install -y --no-install-recommends \
        jq \
        dbus \
        network-manager \
        libpulse0 \
    && rm -rf /var/lib/apt/lists/*

# Install Python dependencies from requirements.txt if it exists
COPY requirements.txt requirements_tests.txt ./
RUN pip3 install -U setuptools pip \
    && pip3 install -r requirements.txt -r requirements_tests.txt \
    && pip3 install tox \
    && rm -f requirements.txt requirements_tests.txt
.devcontainer/devcontainer.json (new file, 32 lines)
@@ -0,0 +1,32 @@
{
  "name": "Supervisor dev",
  "context": "..",
  "dockerFile": "Dockerfile",
  "appPort": "9123:8123",
  "postCreateCommand": "pre-commit install",
  "runArgs": ["-e", "GIT_EDITOR=code --wait", "--privileged"],
  "extensions": [
    "ms-python.python",
    "ms-python.vscode-pylance",
    "visualstudioexptteam.vscodeintellicode",
    "esbenp.prettier-vscode"
  ],
  "settings": {
    "terminal.integrated.shell.linux": "/bin/bash",
    "editor.formatOnPaste": false,
    "editor.formatOnSave": true,
    "editor.formatOnType": true,
    "files.trimTrailingWhitespace": true,
    "python.pythonPath": "/usr/local/bin/python3",
    "python.linting.pylintEnabled": true,
    "python.linting.enabled": true,
    "python.formatting.provider": "black",
    "python.formatting.blackArgs": ["--target-version", "py38"],
    "python.formatting.blackPath": "/usr/local/bin/black",
    "python.linting.banditPath": "/usr/local/bin/bandit",
    "python.linting.flake8Path": "/usr/local/bin/flake8",
    "python.linting.mypyPath": "/usr/local/bin/mypy",
    "python.linting.pylintPath": "/usr/local/bin/pylint",
    "python.linting.pydocstylePath": "/usr/local/bin/pydocstyle"
  }
}
.dockerignore
@@ -1,13 +1,23 @@
# General files
.git
.github
.devcontainer
.vscode

# Test related files
.tox

# Temporary files
**/__pycache__
.pytest_cache

# virtualenv
venv/
ENV/

# Data
home-assistant-polymer/
script/
tests/

# Test ENV
data/
.github/ISSUE_TEMPLATE.md (vendored, 13 lines changed)
@@ -1,15 +1,15 @@
 <!-- READ THIS FIRST:
 - If you need additional help with this template please refer to https://www.home-assistant.io/help/reporting_issues/
-- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
-- Do not report issues for components here, plaese refer to https://github.com/home-assistant/home-assistant/issues
+- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/core/releases
+- Do not report issues for integrations here, please refer to https://github.com/home-assistant/core/issues
 - This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
 - Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
-- If you have a problem with a Add-on, make a issue on there repository.
+- If you have a problem with an add-on, make an issue in its repository.
 -->

 **Home Assistant release with the issue:**
 <!--
-- Frontend -> Developer tools -> Info
+- Frontend -> Configuration -> Info
 - Or use this command: hass --version
 -->

@@ -20,10 +20,9 @@ Please provide details about your environment.

 **Supervisor logs:**
 <!--
-- Frontend -> Hass.io -> System
-- Or use this command: hassio su logs
+- Frontend -> Supervisor -> System
+- Or use this command: ha supervisor logs
 -->


 **Description of problem:**
.github/dependabot.yml (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
version: 2
updates:
  - package-ecosystem: pip
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: daily
      time: "06:00"
    open-pull-requests-limit: 10
.github/lock.yml (vendored, new file, 27 lines)
@@ -0,0 +1,27 @@
# Configuration for Lock Threads - https://github.com/dessant/lock-threads

# Number of days of inactivity before a closed issue or pull request is locked
daysUntilLock: 1

# Skip issues and pull requests created before a given timestamp. Timestamp must
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
skipCreatedBefore: 2020-01-01

# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
exemptLabels: []

# Label to add before locking, such as `outdated`. Set to `false` to disable
lockLabel: false

# Comment to post before locking. Set to `false` to disable
lockComment: false

# Assign `resolved` as the reason for locking. Set to `false` to disable
setLockReason: false

# Limit to only `issues` or `pulls`
only: pulls

# Optionally, specify configuration settings just for `issues` or `pulls`
issues:
  daysUntilLock: 30
.github/main.workflow (vendored, deleted, 16 lines)
@@ -1,16 +0,0 @@
-workflow "tox" {
-  on = "push"
-  resolves = [
-    "Python 3.7",
-    "Json Files",
-  ]
-}
-
-action "Python 3.7" {
-  uses = "home-assistant/actions/py37-tox@master"
-}
-
-action "Json Files" {
-  uses = "home-assistant/actions/jq@master"
-  args = "**/*.json"
-}
.github/stale.yml (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
  - pinned
  - security
  - rfc
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
  This issue has been automatically marked as stale because it has not had
  recent activity. It will be closed if no further activity occurs. Thank you
  for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
.github/workflows/ci.yaml (vendored, new file, 432 lines)
@@ -0,0 +1,432 @@
name: CI

# yamllint disable-line rule:truthy
on:
  push:
    branches:
      - dev
      - master
  pull_request: ~

env:
  DEFAULT_PYTHON: 3.8
  PRE_COMMIT_HOME: ~/.cache/pre-commit

jobs:
  # Separate job to pre-populate the base dependency cache.
  # This prevents the upcoming jobs from each doing the same individually.
  prepare:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        python-version: [3.8]
    name: Prepare Python ${{ matrix.python-version }} dependencies
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: actions/setup-python@v2.1.2
        with:
          python-version: ${{ matrix.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
          restore-keys: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-
      - name: Create Python virtual environment
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          python -m venv venv
          . venv/bin/activate
          pip install -U pip setuptools
          pip install -r requirements.txt -r requirements_tests.txt
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
          restore-keys: |
            ${{ runner.os }}-pre-commit-
      - name: Install pre-commit dependencies
        if: steps.cache-precommit.outputs.cache-hit != 'true'
        run: |
          . venv/bin/activate
          pre-commit install-hooks

  lint-black:
    name: Check black
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run black
        run: |
          . venv/bin/activate
          black --target-version py38 --check supervisor tests setup.py

  lint-dockerfile:
    name: Check Dockerfile
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Register hadolint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/hadolint.json"
      - name: Check Dockerfile
        uses: docker://hadolint/hadolint:v1.18.0
        with:
          args: hadolint Dockerfile

  lint-executable-shebangs:
    name: Check executables
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register check executables problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
      - name: Run executables check
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files

  lint-flake8:
    name: Check flake8
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register flake8 problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/flake8.json"
      - name: Run flake8
        run: |
          . venv/bin/activate
          flake8 supervisor tests

  lint-isort:
    name: Check isort
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run isort
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual isort --all-files --show-diff-on-failure

  lint-json:
    name: Check JSON
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register check-json problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-json.json"
      - name: Run check-json
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-json --all-files

  lint-pylint:
    name: Check pylint
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Register pylint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/pylint.json"
      - name: Run pylint
        run: |
          . venv/bin/activate
          pylint supervisor tests

  lint-pyupgrade:
    name: Check pyupgrade
    runs-on: ubuntu-latest
    needs: prepare
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: actions/cache@v2
        with:
          path: ${{ env.PRE_COMMIT_HOME }}
          key: |
            ${{ runner.os }}-pre-commit-${{ hashFiles('.pre-commit-config.yaml') }}
      - name: Fail job if cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Run pyupgrade
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual pyupgrade --all-files --show-diff-on-failure

  pytest:
    runs-on: ubuntu-latest
    needs: prepare
    strategy:
      matrix:
        python-version: [3.8]
    name: Run tests Python ${{ matrix.python-version }}
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ matrix.python-version }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Install additional system dependencies
        run: |
          sudo apt-get update
          sudo apt-get install -y --no-install-recommends libpulse0 libudev1
      - name: Register Python problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/python.json"
      - name: Install Pytest Annotation plugin
        run: |
          . venv/bin/activate
          # Ideally this should be part of our dependencies
          # However this plugin is fairly new and doesn't run correctly
          # on a non-GitHub environment.
          pip install pytest-github-actions-annotate-failures
      - name: Run pytest
        run: |
          . venv/bin/activate
          pytest \
            -qq \
            --timeout=10 \
            --durations=10 \
            --cov supervisor \
            -o console_output_style=count \
            tests
      - name: Upload coverage artifact
        uses: actions/upload-artifact@v2.1.4
        with:
          name: coverage-${{ matrix.python-version }}
          path: .coverage

  coverage:
    name: Process test coverage
    runs-on: ubuntu-latest
    needs: pytest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Set up Python ${{ env.DEFAULT_PYTHON }}
        uses: actions/setup-python@v2.1.2
        id: python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
      - name: Restore Python virtual environment
        id: cache-venv
        uses: actions/cache@v2
        with:
          path: venv
          key: |
            ${{ runner.os }}-venv-${{ steps.python.outputs.python-version }}-${{ hashFiles('requirements.txt') }}-${{ hashFiles('requirements_tests.txt') }}
      - name: Fail job if Python cache restore failed
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          echo "Failed to restore Python virtual environment from cache"
          exit 1
      - name: Download all coverage artifacts
        uses: actions/download-artifact@v2
      - name: Combine coverage results
        run: |
          . venv/bin/activate
          coverage combine coverage*/.coverage*
          coverage report
          coverage xml
      - name: Upload coverage to Codecov
        uses: codecov/codecov-action@v1.0.13
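Every job above restores the same virtual-environment cache and fails fast when the exact key misses; only the prepare job supplies restore-keys fallbacks and rebuilds on a miss. A rough Python sketch of how those keys compose (the version and hash inputs here are illustrative, not GitHub's actual implementation):

import hashlib

def file_hash(path: str) -> str:
    """Stand-in for GitHub's hashFiles(): hash a file's contents."""
    with open(path, "rb") as fh:
        return hashlib.sha256(fh.read()).hexdigest()[:12]

runner_os = "Linux"
python_version = "3.8.5"  # hypothetical resolved interpreter version

# Exact key, as required by every lint/test job (must hit, or the job exits 1).
key = (f"{runner_os}-venv-{python_version}-"
       f"{file_hash('requirements.txt')}-{file_hash('requirements_tests.txt')}")

# Fallback prefixes, used only by the "prepare" job via restore-keys.
restore_keys = [
    f"{runner_os}-venv-{python_version}-{file_hash('requirements.txt')}",
    f"{runner_os}-venv-{python_version}-",
]
print(key, restore_keys)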
.github/workflows/matchers/check-executables-have-shebangs.json (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
{
  "problemMatcher": [
    {
      "owner": "check-executables-have-shebangs",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+)$",
          "file": 1,
          "message": 2
        }
      ]
    }
  ]
}
.github/workflows/matchers/check-json.json (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
{
  "problemMatcher": [
    {
      "owner": "check-json",
      "pattern": [
        {
          "regexp": "^(.+):\\s(.+\\sline\\s(\\d+)\\scolumn\\s(\\d+).+)$",
          "file": 1,
          "message": 2,
          "line": 3,
          "column": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/flake8.json (vendored, new file, 30 lines)
@@ -0,0 +1,30 @@
{
  "problemMatcher": [
    {
      "owner": "flake8-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s(E\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    },
    {
      "owner": "flake8-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.*):(\\d+):(\\d+):\\s([CDFNW]\\d{3}\\s.*)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4
        }
      ]
    }
  ]
}
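For reference, a minimal sketch of how these two patterns classify flake8 output, assuming flake8's default `file:line:col: CODE message` format (the sample lines are hypothetical):

import json
import re

# Hypothetical sample lines: E-codes hit the error matcher,
# C/D/F/N/W-codes hit the warning matcher.
samples = [
    "supervisor/core.py:10:1: E302 expected 2 blank lines, got 1",
    "tests/test_core.py:3:1: F401 'os' imported but unused",
]

with open(".github/workflows/matchers/flake8.json") as fh:
    matchers = json.load(fh)["problemMatcher"]

for matcher in matchers:
    pattern = matcher["pattern"][0]
    regexp = re.compile(pattern["regexp"])
    for line in samples:
        match = regexp.match(line)
        if match:
            # Map capture groups the same way the matcher definition does.
            fields = {name: match.group(index)
                      for name, index in pattern.items() if name != "regexp"}
            print(matcher["owner"], fields)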
.github/workflows/matchers/hadolint.json (vendored, new file, 16 lines)
@@ -0,0 +1,16 @@
{
  "problemMatcher": [
    {
      "owner": "hadolint",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+)\\s+((DL\\d{4}).+)$",
          "file": 1,
          "line": 2,
          "message": 3,
          "code": 4
        }
      ]
    }
  ]
}
.github/workflows/matchers/pylint.json (vendored, new file, 32 lines)
@@ -0,0 +1,32 @@
{
  "problemMatcher": [
    {
      "owner": "pylint-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([EF]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    },
    {
      "owner": "pylint-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^(.+):(\\d+):(\\d+):\\s(([CRW]\\d{4}):\\s.+)$",
          "file": 1,
          "line": 2,
          "column": 3,
          "message": 4,
          "code": 5
        }
      ]
    }
  ]
}
.github/workflows/matchers/python.json (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
{
  "problemMatcher": [
    {
      "owner": "python",
      "pattern": [
        {
          "regexp": "^\\s*File\\s\\\"(.*)\\\",\\sline\\s(\\d+),\\sin\\s(.*)$",
          "file": 1,
          "line": 2
        },
        {
          "regexp": "^\\s*raise\\s(.*)\\(\\'(.*)\\'\\)$",
          "message": 2
        }
      ]
    }
  ]
}
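This matcher chains two patterns across consecutive traceback lines: the first extracts the file and line from a "File ..." entry, the second the message from the "raise ..." line that follows it. A small sketch with a hypothetical traceback excerpt:

import re

# The two patterns from python.json, written as raw strings.
file_re = re.compile(r"^\s*File\s\"(.*)\",\sline\s(\d+),\sin\s(.*)$")
raise_re = re.compile(r"^\s*raise\s(.*)\(\'(.*)\'\)$")

# Hypothetical traceback excerpt, for illustration only.
trace = [
    '  File "supervisor/core.py", line 42, in start',
    "  raise RuntimeError('Supervisor failed to start')",
]

file_match = file_re.match(trace[0])
raise_match = raise_re.match(trace[1])
if file_match and raise_match:
    # file, line, and message as the matcher would report them
    print(file_match.group(1), file_match.group(2), raise_match.group(2))
    # -> supervisor/core.py 42 Supervisor failed to start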
.github/workflows/release-drafter.yml (vendored, new file, 15 lines)
@@ -0,0 +1,15 @@
name: Release Drafter

on:
  push:
    # branches to consider in the event; optional, defaults to all
    branches:
      - dev

jobs:
  update_release_draft:
    runs-on: ubuntu-latest
    steps:
      - uses: release-drafter/release-drafter@v5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/sentry.yaml (vendored, new file, 21 lines)
@@ -0,0 +1,21 @@
name: Sentry Release

# yamllint disable-line rule:truthy
on:
  release:
    types: [published, prereleased]

jobs:
  createSentryRelease:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v2
      - name: Sentry Release
        uses: getsentry/action-release@v1.0.1
        env:
          SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
          SENTRY_ORG: ${{ secrets.SENTRY_ORG }}
          SENTRY_PROJECT: ${{ secrets.SENTRY_PROJECT }}
        with:
          environment: production
.gitignore (vendored, 9 lines changed)
@@ -92,4 +92,11 @@ ENV/
 .pylint.d/

 # VS Code
-.vscode/
+.vscode/*
+!.vscode/cSpell.json
+!.vscode/tasks.json
+!.vscode/launch.json
+
+# mypy
+/.mypy_cache/*
+/.dmypy.json
.hadolint.yaml (new file, 5 lines)
@@ -0,0 +1,5 @@
ignored:
  - DL3018
  - DL3006
  - DL3013
  - SC2155
.pre-commit-config.yaml (new file, 34 lines)
@@ -0,0 +1,34 @@
repos:
  - repo: https://github.com/psf/black
    rev: 20.8b1
    hooks:
      - id: black
        args:
          - --safe
          - --quiet
          - --target-version
          - py38
        files: ^((supervisor|tests)/.+)?[^/]+\.py$
  - repo: https://gitlab.com/pycqa/flake8
    rev: 3.8.3
    hooks:
      - id: flake8
        additional_dependencies:
          - flake8-docstrings==1.5.0
          - pydocstyle==5.0.2
        files: ^(supervisor|script|tests)/.+\.py$
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v3.1.0
    hooks:
      - id: check-executables-have-shebangs
        stages: [manual]
      - id: check-json
  - repo: https://github.com/pre-commit/mirrors-isort
    rev: v4.3.21
    hooks:
      - id: isort
  - repo: https://github.com/asottile/pyupgrade
    rev: v2.6.2
    hooks:
      - id: pyupgrade
        args: [--py37-plus]
.vscode/launch.json (vendored, new file, 18 lines)
@@ -0,0 +1,18 @@
{
  "version": "0.2.0",
  "configurations": [
    {
      "name": "Supervisor remote debug",
      "type": "python",
      "request": "attach",
      "port": 33333,
      "host": "172.30.32.2",
      "pathMappings": [
        {
          "localRoot": "${workspaceFolder}",
          "remoteRoot": "/usr/src/supervisor"
        }
      ]
    }
  ]
}
.vscode/tasks.json (vendored, new file, 90 lines)
@@ -0,0 +1,90 @@
{
  "version": "2.0.0",
  "tasks": [
    {
      "label": "Run Testenv",
      "type": "shell",
      "command": "./scripts/test_env.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Run Testenv CLI",
      "type": "shell",
      "command": "docker exec -ti hassio_cli /usr/bin/cli.sh",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Update UI",
      "type": "shell",
      "command": "./scripts/update-frontend.sh",
      "group": {
        "kind": "build",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pytest",
      "type": "shell",
      "command": "pytest --timeout=10 tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Flake8",
      "type": "shell",
      "command": "flake8 supervisor tests",
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    },
    {
      "label": "Pylint",
      "type": "shell",
      "command": "pylint supervisor",
      "dependsOn": ["Install all Requirements"],
      "group": {
        "kind": "test",
        "isDefault": true
      },
      "presentation": {
        "reveal": "always",
        "panel": "new"
      },
      "problemMatcher": []
    }
  ]
}
Dockerfile (57 lines changed)
@@ -1,33 +1,40 @@
 ARG BUILD_FROM
 FROM $BUILD_FROM

+ENV \
+    S6_SERVICES_GRACETIME=10000 \
+    SUPERVISOR_API=http://localhost
+
 # Install base
-RUN apk add --no-cache \
-    openssl \
-    libffi \
-    musl \
-    git \
-    socat \
-    glib \
-    libstdc++ \
-    eudev-libs
+RUN \
+    apk add --no-cache \
+        eudev \
+        eudev-libs \
+        git \
+        glib \
+        libffi \
+        libpulse \
+        musl \
+        openssl

 ARG BUILD_ARCH
 WORKDIR /usr/src

 # Install requirements
-COPY requirements.txt /usr/src/
-RUN apk add --no-cache --virtual .build-dependencies \
-    make \
-    g++ \
-    openssl-dev \
-    libffi-dev \
-    musl-dev \
-    && export MAKEFLAGS="-j$(nproc)" \
-    && pip3 install --no-cache-dir -r /usr/src/requirements.txt \
-    && apk del .build-dependencies \
-    && rm -f /usr/src/requirements.txt
+COPY requirements.txt .
+RUN \
+    export MAKEFLAGS="-j$(nproc)" \
+    && pip3 install --no-cache-dir --no-index --only-binary=:all: --find-links \
+        "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/" \
+        -r ./requirements.txt \
+    && rm -f requirements.txt

-# Install HassIO
-COPY . /usr/src/hassio
-RUN pip3 install --no-cache-dir /usr/src/hassio \
-    && rm -rf /usr/src/hassio
+# Install Home Assistant Supervisor
+COPY . supervisor
+RUN \
+    pip3 install --no-cache-dir -e ./supervisor \
+    && python3 -m compileall ./supervisor/supervisor

-CMD [ "python3", "-m", "hassio" ]
-
+WORKDIR /
+COPY rootfs /
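The `--find-links` URL in the new requirements step is assembled from the Alpine release and the `BUILD_ARCH` build argument; a short sketch of how that shell substitution resolves (the release and architecture values are examples):

# Mirrors: "https://wheels.home-assistant.io/alpine-$(cut -d '.' -f 1-2 < /etc/alpine-release)/${BUILD_ARCH}/"
alpine_release = "3.12.0"   # example contents of /etc/alpine-release
build_arch = "amd64"        # example BUILD_ARCH build argument

major_minor = ".".join(alpine_release.split(".")[:2])  # cut -d '.' -f 1-2
index = f"https://wheels.home-assistant.io/alpine-{major_minor}/{build_arch}/"
print(index)  # https://wheels.home-assistant.io/alpine-3.12/amd64/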
LICENSE (4 lines changed)
@@ -178,7 +178,7 @@
    APPENDIX: How to apply the Apache License to your work.

       To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "{}"
+      boilerplate notice, with the fields enclosed by brackets "[]"
       replaced with your own identifying information. (Don't include
       the brackets!) The text should be enclosed in the appropriate
       comment syntax for the file format. We also recommend that a
@@ -186,7 +186,7 @@
    same "printed page" as the copyright notice for easier
    identification within third-party archives.

-   Copyright 2017 Pascal Vizeli
+   Copyright [yyyy] [name of copyright owner]

    Licensed under the Apache License, Version 2.0 (the "License");
    you may not use this file except in compliance with the License.
MANIFEST.in
@@ -1,3 +1,3 @@
 include LICENSE.md
-graft hassio
+graft supervisor
 recursive-exclude * *.py[co]
README.md (28 lines changed)
@@ -1,28 +1,26 @@
-# Hass.io
+# Home Assistant Supervisor

 ## First private cloud solution for home automation

-Hass.io is a Docker-based system for managing your Home Assistant installation
-and related applications. The system is controlled via Home Assistant which
-communicates with the Supervisor. The Supervisor provides an API to manage the
-installation. This includes changing network settings or installing
-and updating software.
-
-![Home Assistant Supervisor](misc/hassio.png?raw=true)
+Home Assistant (former Hass.io) is a container-based system for managing your
+Home Assistant Core installation and related applications. The system is
+controlled via Home Assistant which communicates with the Supervisor. The
+Supervisor provides an API to manage the installation. This includes changing
+network settings or installing and updating software.

 ## Installation

-Installation instructions can be found at <https://home-assistant.io/hassio>.
+Installation instructions can be found at https://home-assistant.io/hassio.

 ## Development

-The development of the supervisor is a bit tricky. Not difficult but tricky.
+The development of the Supervisor is not difficult but tricky.

-- You can use the builder to build your supervisor: https://github.com/home-assistant/hassio-build/tree/master/builder
-- Go into a HassOS device or VM and pull your supervisor.
-- Set the developer modus on updater.json
+- You can use the builder to create your Supervisor: https://github.com/home-assistant/hassio-builder
+- Access a HassOS device or VM and pull your Supervisor.
+- Set the developer modus with the CLI tool: `ha supervisor options --channel=dev`
 - Tag it as `homeassistant/xy-hassio-supervisor:latest`
-- Restart the service like `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
+- Restart the service with `systemctl restart hassos-supervisor | journalctl -fu hassos-supervisor`
 - Test your changes

-Small Bugfix or improvements, make a PR. Significant change makes first an RFC.
+For small bugfixes or improvements, make a PR. For significant changes open a RFC first, please. Thanks.
52
azure-pipelines-ci.yml
Normal file
52
azure-pipelines-ci.yml
Normal file
@@ -0,0 +1,52 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- master
|
||||
- dev
|
||||
pr:
|
||||
- dev
|
||||
variables:
|
||||
- name: versionHadolint
|
||||
value: "v1.16.3"
|
||||
|
||||
jobs:
|
||||
- job: "Tox"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y libpulse0 libudev1
|
||||
displayName: "Install Host library"
|
||||
- task: UsePythonVersion@0
|
||||
displayName: "Use Python 3.8"
|
||||
inputs:
|
||||
versionSpec: "3.8"
|
||||
- script: pip install tox
|
||||
displayName: "Install Tox"
|
||||
- script: tox
|
||||
displayName: "Run Tox"
|
||||
- job: "JQ"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: sudo apt-get install -y jq
|
||||
displayName: "Install JQ"
|
||||
- bash: |
|
||||
shopt -s globstar
|
||||
cat **/*.json | jq '.'
|
||||
displayName: "Run JQ"
|
||||
- job: "Hadolint"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: sudo docker pull hadolint/hadolint:$(versionHadolint)
|
||||
displayName: "Install Hadolint"
|
||||
- script: |
|
||||
sudo docker run --rm -i \
|
||||
-v $(pwd)/.hadolint.yaml:/.hadolint.yaml:ro \
|
||||
hadolint/hadolint:$(versionHadolint) < Dockerfile
|
||||
displayName: "Run Hadolint"
|
53
azure-pipelines-release.yml
Normal file
53
azure-pipelines-release.yml
Normal file
@@ -0,0 +1,53 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- dev
|
||||
tags:
|
||||
include:
|
||||
- "*"
|
||||
pr: none
|
||||
variables:
|
||||
- name: versionBuilder
|
||||
value: "7.0"
|
||||
- group: docker
|
||||
|
||||
jobs:
|
||||
- job: "VersionValidate"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
displayName: "Use Python 3.8"
|
||||
inputs:
|
||||
versionSpec: "3.8"
|
||||
- script: |
|
||||
setup_version="$(python setup.py -V)"
|
||||
branch_version="$(Build.SourceBranchName)"
|
||||
|
||||
if [ "${branch_version}" == "dev" ]; then
|
||||
exit 0
|
||||
elif [ "${setup_version}" != "${branch_version}" ]; then
|
||||
echo "Version of tag ${branch_version} don't match with ${setup_version}!"
|
||||
exit 1
|
||||
fi
|
||||
displayName: "Check version of branch/tag"
|
||||
- job: "Release"
|
||||
dependsOn:
|
||||
- "VersionValidate"
|
||||
pool:
|
||||
vmImage: "ubuntu-latest"
|
||||
steps:
|
||||
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
|
||||
displayName: "Docker hub login"
|
||||
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
|
||||
displayName: "Install Builder"
|
||||
- script: |
|
||||
sudo docker run --rm --privileged \
|
||||
-v ~/.docker:/root/.docker \
|
||||
-v /run/docker.sock:/run/docker.sock:rw -v $(pwd):/data:ro \
|
||||
homeassistant/amd64-builder:$(versionBuilder) \
|
||||
--generic $(Build.SourceBranchName) --all -t /data
|
||||
displayName: "Build Release"
|
27
azure-pipelines-wheels.yml
Normal file
27
azure-pipelines-wheels.yml
Normal file
@@ -0,0 +1,27 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- dev
|
||||
pr: none
|
||||
variables:
|
||||
- name: versionWheels
|
||||
value: '1.13.0-3.8-alpine3.12'
|
||||
resources:
|
||||
repositories:
|
||||
- repository: azure
|
||||
type: github
|
||||
name: 'home-assistant/ci-azure'
|
||||
endpoint: 'home-assistant'
|
||||
|
||||
|
||||
jobs:
|
||||
- template: templates/azp-job-wheels.yaml@azure
|
||||
parameters:
|
||||
builderVersion: '$(versionWheels)'
|
||||
builderApk: 'build-base;libffi-dev;openssl-dev'
|
||||
builderPip: 'Cython'
|
||||
skipBinary: 'aiohttp'
|
||||
wheelsRequirement: 'requirements.txt'
|
13
build.json
Normal file
13
build.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"image": "homeassistant/{arch}-hassio-supervisor",
|
||||
"build_from": {
|
||||
"aarch64": "homeassistant/aarch64-base-python:3.8-alpine3.12",
|
||||
"armhf": "homeassistant/armhf-base-python:3.8-alpine3.12",
|
||||
"armv7": "homeassistant/armv7-base-python:3.8-alpine3.12",
|
||||
"amd64": "homeassistant/amd64-base-python:3.8-alpine3.12",
|
||||
"i386": "homeassistant/i386-base-python:3.8-alpine3.12"
|
||||
},
|
||||
"labels": {
|
||||
"io.hass.type": "supervisor"
|
||||
}
|
||||
}
|
11
codecov.yaml
Normal file
11
codecov.yaml
Normal file
@@ -0,0 +1,11 @@
|
||||
codecov:
|
||||
branch: dev
|
||||
coverage:
|
||||
status:
|
||||
project:
|
||||
default:
|
||||
target: 40
|
||||
threshold: 0.09
|
||||
comment: false
|
||||
github_checks:
|
||||
annotations: false
|
@@ -1 +0,0 @@
|
||||
"""Init file for Hass.io."""
|
@@ -1,158 +0,0 @@
|
||||
"""Init file for Hass.io add-ons."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
from .addon import Addon
|
||||
from .repository import Repository
|
||||
from .data import AddonsData
|
||||
from ..const import REPOSITORY_CORE, REPOSITORY_LOCAL, BOOT_AUTO, STATE_STARTED
|
||||
from ..coresys import CoreSysAttributes
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
BUILTIN_REPOSITORIES = set((REPOSITORY_CORE, REPOSITORY_LOCAL))
|
||||
|
||||
|
||||
class AddonManager(CoreSysAttributes):
|
||||
"""Manage add-ons inside Hass.io."""
|
||||
|
||||
def __init__(self, coresys):
|
||||
"""Initialize Docker base wrapper."""
|
||||
self.coresys = coresys
|
||||
self.data = AddonsData(coresys)
|
||||
self.addons_obj = {}
|
||||
self.repositories_obj = {}
|
||||
|
||||
@property
|
||||
def list_addons(self):
|
||||
"""Return a list of all add-ons."""
|
||||
return list(self.addons_obj.values())
|
||||
|
||||
@property
|
||||
def list_installed(self):
|
||||
"""Return a list of installed add-ons."""
|
||||
return [addon for addon in self.addons_obj.values()
|
||||
if addon.is_installed]
|
||||
|
||||
@property
|
||||
def list_repositories(self):
|
||||
"""Return list of add-on repositories."""
|
||||
return list(self.repositories_obj.values())
|
||||
|
||||
def get(self, addon_slug):
|
||||
"""Return an add-on from slug."""
|
||||
return self.addons_obj.get(addon_slug)
|
||||
|
||||
def from_token(self, token):
|
||||
"""Return an add-on from Hass.io token."""
|
||||
for addon in self.list_addons:
|
||||
if addon.is_installed and token == addon.hassio_token:
|
||||
return addon
|
||||
return None
|
||||
|
||||
async def load(self):
|
||||
"""Start up add-on management."""
|
||||
self.data.reload()
|
||||
|
||||
# Init Hass.io built-in repositories
|
||||
repositories = \
|
||||
set(self.sys_config.addons_repositories) | BUILTIN_REPOSITORIES
|
||||
|
||||
# Init custom repositories and load add-ons
|
||||
await self.load_repositories(repositories)
|
||||
|
||||
async def reload(self):
|
||||
"""Update add-ons from repository and reload list."""
|
||||
tasks = [repository.update() for repository in
|
||||
self.repositories_obj.values()]
|
||||
if tasks:
|
||||
await asyncio.wait(tasks)
|
||||
|
||||
# read data from repositories
|
||||
self.data.reload()
|
||||
|
||||
# update addons
|
||||
await self.load_addons()
|
||||
|
||||
async def load_repositories(self, list_repositories):
|
||||
"""Add a new custom repository."""
|
||||
new_rep = set(list_repositories)
|
||||
old_rep = set(self.repositories_obj)
|
||||
|
||||
# add new repository
|
||||
async def _add_repository(url):
|
||||
"""Helper function to async add repository."""
|
||||
repository = Repository(self.coresys, url)
|
||||
if not await repository.load():
|
||||
_LOGGER.error("Can't load from repository %s", url)
|
||||
return
|
||||
self.repositories_obj[url] = repository
|
||||
|
||||
# don't add built-in repository to config
|
||||
if url not in BUILTIN_REPOSITORIES:
|
||||
self.sys_config.add_addon_repository(url)
|
||||
|
||||
tasks = [_add_repository(url) for url in new_rep - old_rep]
|
||||
if tasks:
|
||||
await asyncio.wait(tasks)
|
||||
|
||||
# del new repository
|
||||
for url in old_rep - new_rep - BUILTIN_REPOSITORIES:
|
||||
self.repositories_obj.pop(url).remove()
|
||||
self.sys_config.drop_addon_repository(url)
|
||||
|
||||
# update data
|
||||
self.data.reload()
|
||||
await self.load_addons()
|
||||
|
||||
async def load_addons(self):
|
||||
"""Update/add internal add-on store."""
|
||||
all_addons = set(self.data.system) | set(self.data.cache)
|
||||
|
||||
# calc diff
|
||||
add_addons = all_addons - set(self.addons_obj)
|
||||
del_addons = set(self.addons_obj) - all_addons
|
||||
|
||||
_LOGGER.info("Load add-ons: %d all - %d new - %d remove",
|
||||
len(all_addons), len(add_addons), len(del_addons))
|
||||
|
||||
# new addons
|
||||
tasks = []
|
||||
for addon_slug in add_addons:
|
||||
addon = Addon(self.coresys, addon_slug)
|
||||
|
||||
tasks.append(addon.load())
|
||||
self.addons_obj[addon_slug] = addon
|
||||
|
||||
if tasks:
|
||||
await asyncio.wait(tasks)
|
||||
|
||||
# remove
|
||||
for addon_slug in del_addons:
|
||||
self.addons_obj.pop(addon_slug)
|
||||
|
||||
async def boot(self, stage):
|
||||
"""Boot add-ons with mode auto."""
|
||||
tasks = []
|
||||
for addon in self.addons_obj.values():
|
||||
if addon.is_installed and addon.boot == BOOT_AUTO and \
|
||||
addon.startup == stage:
|
||||
tasks.append(addon.start())
|
||||
|
||||
_LOGGER.info("Startup %s run %d add-ons", stage, len(tasks))
|
||||
if tasks:
|
||||
await asyncio.wait(tasks)
|
||||
await asyncio.sleep(self.sys_config.wait_boot)
|
||||
|
||||
async def shutdown(self, stage):
|
||||
"""Shutdown addons."""
|
||||
tasks = []
|
||||
for addon in self.addons_obj.values():
|
||||
if addon.is_installed and \
|
||||
await addon.state() == STATE_STARTED and \
|
||||
addon.startup == stage:
|
||||
tasks.append(addon.stop())
|
||||
|
||||
_LOGGER.info("Shutdown %s stop %d add-ons", stage, len(tasks))
|
||||
if tasks:
|
||||
await asyncio.wait(tasks)
|
@@ -1,997 +0,0 @@
|
||||
"""Init file for Hass.io add-ons."""
|
||||
from contextlib import suppress
|
||||
from copy import deepcopy
|
||||
import logging
|
||||
from pathlib import Path, PurePath
|
||||
import re
|
||||
import shutil
|
||||
import tarfile
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import Dict, Any
|
||||
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from ..const import (
|
||||
ATTR_ACCESS_TOKEN, ATTR_APPARMOR, ATTR_ARCH, ATTR_AUDIO, ATTR_AUDIO_INPUT,
|
||||
ATTR_AUDIO_OUTPUT, ATTR_AUTH_API, ATTR_AUTO_UART, ATTR_AUTO_UPDATE,
|
||||
ATTR_BOOT, ATTR_DESCRIPTON, ATTR_DEVICES, ATTR_DEVICETREE, ATTR_DISCOVERY,
|
||||
ATTR_DOCKER_API, ATTR_ENVIRONMENT, ATTR_FULL_ACCESS, ATTR_GPIO,
|
||||
ATTR_HASSIO_API, ATTR_HASSIO_ROLE, ATTR_HOMEASSISTANT_API, ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC, ATTR_HOST_NETWORK, ATTR_HOST_PID, ATTR_IMAGE,
|
||||
ATTR_KERNEL_MODULES, ATTR_LEGACY, ATTR_LOCATON, ATTR_MACHINE, ATTR_MAP,
|
||||
ATTR_NAME, ATTR_NETWORK, ATTR_OPTIONS, ATTR_PORTS, ATTR_PRIVILEGED,
|
||||
ATTR_PROTECTED, ATTR_REPOSITORY, ATTR_SCHEMA, ATTR_SERVICES, ATTR_SLUG,
|
||||
ATTR_STARTUP, ATTR_STATE, ATTR_STDIN, ATTR_SYSTEM, ATTR_TIMEOUT,
|
||||
ATTR_TMPFS, ATTR_URL, ATTR_USER, ATTR_UUID, ATTR_VERSION, ATTR_WEBUI,
|
||||
SECURITY_DEFAULT, SECURITY_DISABLE, SECURITY_PROFILE, STATE_NONE,
|
||||
STATE_STARTED, STATE_STOPPED)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..docker.addon import DockerAddon
|
||||
from ..exceptions import HostAppArmorError, JsonFileError
|
||||
from ..utils import create_token
|
||||
from ..utils.apparmor import adjust_profile
|
||||
from ..utils.json import read_json_file, write_json_file
|
||||
from .utils import check_installed, remove_data
|
||||
from .validate import (
|
||||
MACHINE_ALL, RE_SERVICE, RE_VOLUME, SCHEMA_ADDON_SNAPSHOT,
|
||||
validate_options)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
RE_WEBUI = re.compile(
|
||||
r"^(?:(?P<s_prefix>https?)|\[PROTO:(?P<t_proto>\w+)\])"
|
||||
r":\/\/\[HOST\]:\[PORT:(?P<t_port>\d+)\](?P<s_suffix>.*)$")
|
||||
|
||||
|
||||
class Addon(CoreSysAttributes):
|
||||
"""Hold data for add-on inside Hass.io."""
|
||||
|
||||
def __init__(self, coresys, slug):
|
||||
"""Initialize data holder."""
|
||||
self.coresys = coresys
|
||||
self.instance = DockerAddon(coresys, slug)
|
||||
|
||||
self._id = slug
|
||||
|
||||
async def load(self):
|
||||
"""Async initialize of object."""
|
||||
if not self.is_installed:
|
||||
return
|
||||
await self.instance.attach()
|
||||
|
||||
@property
|
||||
def slug(self):
|
||||
"""Return slug/id of add-on."""
|
||||
return self._id
|
||||
|
||||
@property
|
||||
def _mesh(self):
|
||||
"""Return add-on data from system or cache."""
|
||||
return self._data.system.get(self._id, self._data.cache.get(self._id))
|
||||
|
||||
@property
|
||||
def _data(self):
|
||||
"""Return add-ons data storage."""
|
||||
return self.sys_addons.data
|
||||
|
||||
@property
|
||||
def is_installed(self):
|
||||
"""Return True if an add-on is installed."""
|
||||
return self._id in self._data.system
|
||||
|
||||
@property
|
||||
def is_detached(self):
|
||||
"""Return True if add-on is detached."""
|
||||
return self._id not in self._data.cache
|
||||
|
||||
@property
|
||||
def available(self):
|
||||
"""Return True if this add-on is available on this platform."""
|
||||
# Architecture
|
||||
if not self.sys_arch.is_supported(self.supported_arch):
|
||||
return False
|
||||
|
||||
# Machine / Hardware
|
||||
if self.sys_machine not in self.supported_machine:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@property
|
||||
def version_installed(self):
|
||||
"""Return installed version."""
|
||||
return self._data.user.get(self._id, {}).get(ATTR_VERSION)
|
||||
|
||||
def _set_install(self, image: str, version: str) -> None:
|
||||
"""Set addon as installed."""
|
||||
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
|
||||
self._data.user[self._id] = {
|
||||
ATTR_OPTIONS: {},
|
||||
ATTR_VERSION: version,
|
||||
ATTR_IMAGE: image,
|
||||
}
|
||||
self.save_data()
|
||||
|
||||
def _set_uninstall(self) -> None:
|
||||
"""Set add-on as uninstalled."""
|
||||
self._data.system.pop(self._id, None)
|
||||
self._data.user.pop(self._id, None)
|
||||
self.save_data()
|
||||
|
||||
def _set_update(self, image: str, version: str) -> None:
|
||||
"""Update version of add-on."""
|
||||
self._data.system[self._id] = deepcopy(self._data.cache[self._id])
|
||||
self._data.user[self._id][ATTR_VERSION] = version
|
||||
self.save_data()
|
||||
|
||||
def _restore_data(self, user: Dict[str, Any], system: Dict[str, Any], image: str) -> None:
|
||||
"""Restore data to add-on."""
|
||||
self._data.user[self._id] = deepcopy(user)
|
||||
self._data.system[self._id] = deepcopy(system)
|
||||
|
||||
self._data.user[self._id][ATTR_IMAGE] = image
|
||||
self.save_data()
|
||||
|
||||
@property
|
||||
def options(self):
|
||||
"""Return options with local changes."""
|
||||
if self.is_installed:
|
||||
return {
|
||||
**self._data.system[self._id][ATTR_OPTIONS],
|
||||
**self._data.user[self._id][ATTR_OPTIONS]
|
||||
}
|
||||
return self._data.cache[self._id][ATTR_OPTIONS]
|
||||
|
||||
@options.setter
|
||||
def options(self, value):
|
||||
"""Store user add-on options."""
|
||||
if value is None:
|
||||
self._data.user[self._id][ATTR_OPTIONS] = {}
|
||||
else:
|
||||
self._data.user[self._id][ATTR_OPTIONS] = deepcopy(value)
|
||||
|
||||
@property
|
||||
def boot(self):
|
||||
"""Return boot config with prio local settings."""
|
||||
if ATTR_BOOT in self._data.user.get(self._id, {}):
|
||||
return self._data.user[self._id][ATTR_BOOT]
|
||||
return self._mesh[ATTR_BOOT]
|
||||
|
||||
@boot.setter
|
||||
def boot(self, value):
|
||||
"""Store user boot options."""
|
||||
self._data.user[self._id][ATTR_BOOT] = value
|
||||
|
||||
@property
|
||||
def auto_update(self):
|
||||
"""Return if auto update is enable."""
|
||||
if ATTR_AUTO_UPDATE in self._data.user.get(self._id, {}):
|
||||
return self._data.user[self._id][ATTR_AUTO_UPDATE]
|
||||
return None
|
||||
|
||||
@auto_update.setter
|
||||
def auto_update(self, value):
|
||||
"""Set auto update."""
|
||||
self._data.user[self._id][ATTR_AUTO_UPDATE] = value
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return name of add-on."""
|
||||
return self._mesh[ATTR_NAME]
|
||||
|
||||
@property
|
||||
def timeout(self):
|
||||
"""Return timeout of addon for docker stop."""
|
||||
return self._mesh[ATTR_TIMEOUT]
|
||||
|
||||
@property
|
||||
def uuid(self):
|
||||
"""Return an API token for this add-on."""
|
||||
if self.is_installed:
|
||||
return self._data.user[self._id][ATTR_UUID]
|
||||
return None
|
||||
|
||||
@property
|
||||
def hassio_token(self):
|
||||
"""Return access token for Hass.io API."""
|
||||
if self.is_installed:
|
||||
return self._data.user[self._id].get(ATTR_ACCESS_TOKEN)
|
||||
return None
|
||||
|
||||
@property
|
||||
def description(self):
|
||||
"""Return description of add-on."""
|
||||
return self._mesh[ATTR_DESCRIPTON]
|
||||
|
||||
@property
|
||||
def long_description(self):
|
||||
"""Return README.md as long_description."""
|
||||
readme = Path(self.path_location, 'README.md')
|
||||
|
||||
# If readme not exists
|
||||
if not readme.exists():
|
||||
return None
|
||||
|
||||
# Return data
|
||||
with readme.open('r') as readme_file:
|
||||
return readme_file.read()
|
||||
|
||||
@property
|
||||
def repository(self):
|
||||
"""Return repository of add-on."""
|
||||
return self._mesh[ATTR_REPOSITORY]
|
||||
|
||||
@property
|
||||
def last_version(self):
|
||||
"""Return version of add-on."""
|
||||
if self._id in self._data.cache:
|
||||
return self._data.cache[self._id][ATTR_VERSION]
|
||||
return self.version_installed
|
||||
|
||||
@property
|
||||
def protected(self):
|
||||
"""Return if add-on is in protected mode."""
|
||||
if self.is_installed:
|
||||
return self._data.user[self._id][ATTR_PROTECTED]
|
||||
return True
|
||||
|
||||
@protected.setter
|
||||
def protected(self, value):
|
||||
"""Set add-on in protected mode."""
|
||||
self._data.user[self._id][ATTR_PROTECTED] = value
|
||||
|
||||
@property
|
||||
def startup(self):
|
||||
"""Return startup type of add-on."""
|
||||
return self._mesh.get(ATTR_STARTUP)
|
||||
|
||||
@property
|
||||
def services_role(self):
|
||||
"""Return dict of services with rights."""
|
||||
raw_services = self._mesh.get(ATTR_SERVICES)
|
||||
if not raw_services:
|
||||
return {}
|
||||
|
||||
services = {}
|
||||
for data in raw_services:
|
||||
service = RE_SERVICE.match(data)
|
||||
services[service.group('service')] = service.group('rights')
|
||||
|
||||
return services
|
||||
|
||||
@property
|
||||
def discovery(self):
|
||||
"""Return list of discoverable components/platforms."""
|
||||
return self._mesh.get(ATTR_DISCOVERY, [])
|
||||
|
||||
@property
|
||||
def ports(self):
|
||||
"""Return ports of add-on."""
|
||||
if self.host_network or ATTR_PORTS not in self._mesh:
|
||||
return None
|
||||
|
||||
if not self.is_installed or \
|
||||
ATTR_NETWORK not in self._data.user[self._id]:
|
||||
return self._mesh[ATTR_PORTS]
|
||||
return self._data.user[self._id][ATTR_NETWORK]
|
||||
|
||||
@ports.setter
|
||||
def ports(self, value):
|
||||
"""Set custom ports of add-on."""
|
||||
if value is None:
|
||||
self._data.user[self._id].pop(ATTR_NETWORK, None)
|
||||
else:
|
||||
new_ports = {}
|
||||
for container_port, host_port in value.items():
|
||||
if container_port in self._mesh.get(ATTR_PORTS, {}):
|
||||
new_ports[container_port] = host_port
|
||||
|
||||
self._data.user[self._id][ATTR_NETWORK] = new_ports
|
||||
|
||||
@property
|
||||
def webui(self):
|
||||
"""Return URL to webui or None."""
|
||||
if ATTR_WEBUI not in self._mesh:
|
||||
return None
|
||||
webui = RE_WEBUI.match(self._mesh[ATTR_WEBUI])
|
||||
|
||||
# extract arguments
|
||||
t_port = webui.group('t_port')
|
||||
t_proto = webui.group('t_proto')
|
||||
s_prefix = webui.group('s_prefix') or ""
|
||||
s_suffix = webui.group('s_suffix') or ""
|
||||
|
||||
# search host port for this docker port
|
||||
if self.ports is None:
|
||||
port = t_port
|
||||
else:
|
||||
port = self.ports.get(f"{t_port}/tcp", t_port)
|
||||
|
||||
# for interface config or port lists
|
||||
if isinstance(port, (tuple, list)):
|
||||
port = port[-1]
|
||||
|
||||
# lookup the correct protocol from config
|
||||
if t_proto:
|
||||
proto = 'https' if self.options[t_proto] else 'http'
|
||||
else:
|
||||
proto = s_prefix
|
||||
|
||||
return f"{proto}://[HOST]:{port}{s_suffix}"
|
||||
|
||||
@property
|
||||
def host_network(self):
|
||||
"""Return True if add-on run on host network."""
|
||||
return self._mesh[ATTR_HOST_NETWORK]
|
||||
|
||||
@property
|
||||
def host_pid(self):
|
||||
"""Return True if add-on run on host PID namespace."""
|
||||
return self._mesh[ATTR_HOST_PID]
|
||||
|
||||
@property
|
||||
def host_ipc(self):
|
||||
"""Return True if add-on run on host IPC namespace."""
|
||||
return self._mesh[ATTR_HOST_IPC]
|
||||
|
||||
@property
|
||||
def host_dbus(self):
|
||||
"""Return True if add-on run on host D-BUS."""
|
||||
return self._mesh[ATTR_HOST_DBUS]
|
||||
|
||||
@property
|
||||
def devices(self):
|
||||
"""Return devices of add-on."""
|
||||
return self._mesh.get(ATTR_DEVICES)
|
||||
|
||||
@property
|
||||
def auto_uart(self):
|
||||
"""Return True if we should map all UART device."""
|
||||
return self._mesh.get(ATTR_AUTO_UART)
|
||||
|
||||
@property
|
||||
def tmpfs(self):
|
||||
"""Return tmpfs of add-on."""
|
||||
return self._mesh.get(ATTR_TMPFS)
|
||||
|
||||
@property
|
||||
def environment(self):
|
||||
"""Return environment of add-on."""
|
||||
return self._mesh.get(ATTR_ENVIRONMENT)
|
||||
|
||||
@property
|
||||
def privileged(self):
|
||||
"""Return list of privilege."""
|
||||
return self._mesh.get(ATTR_PRIVILEGED, [])
|
||||
|
||||
@property
|
||||
def apparmor(self):
|
||||
"""Return True if AppArmor is enabled."""
|
||||
if not self._mesh.get(ATTR_APPARMOR):
|
||||
return SECURITY_DISABLE
|
||||
elif self.sys_host.apparmor.exists(self.slug):
|
||||
return SECURITY_PROFILE
|
||||
return SECURITY_DEFAULT
|
||||
|
||||
@property
|
||||
def legacy(self):
|
||||
"""Return if the add-on don't support Home Assistant labels."""
|
||||
return self._mesh.get(ATTR_LEGACY)
|
||||
|
||||
@property
|
||||
def access_docker_api(self):
|
||||
"""Return if the add-on need read-only Docker API access."""
|
||||
return self._mesh.get(ATTR_DOCKER_API)
|
||||
|
||||
@property
|
||||
def access_hassio_api(self):
|
||||
"""Return True if the add-on access to Hass.io REASTful API."""
|
||||
return self._mesh[ATTR_HASSIO_API]
|
||||
|
||||
@property
|
||||
def access_homeassistant_api(self):
|
||||
"""Return True if the add-on access to Home Assistant API proxy."""
|
||||
return self._mesh[ATTR_HOMEASSISTANT_API]
|
||||
|
||||
@property
|
||||
def hassio_role(self):
|
||||
"""Return Hass.io role for API."""
|
||||
return self._mesh[ATTR_HASSIO_ROLE]
|
||||
|
||||
@property
|
||||
def with_stdin(self):
|
||||
"""Return True if the add-on access use stdin input."""
|
||||
return self._mesh[ATTR_STDIN]
|
||||
|
||||
@property
|
||||
def with_gpio(self):
|
||||
"""Return True if the add-on access to GPIO interface."""
|
||||
return self._mesh[ATTR_GPIO]
|
||||
|
||||
@property
|
||||
def with_kernel_modules(self):
|
||||
"""Return True if the add-on access to kernel modules."""
|
||||
return self._mesh[ATTR_KERNEL_MODULES]
|
||||
|
||||
@property
|
||||
def with_full_access(self):
|
||||
"""Return True if the add-on want full access to hardware."""
|
||||
return self._mesh[ATTR_FULL_ACCESS]
|
||||
|
||||
@property
|
||||
def with_devicetree(self):
|
||||
"""Return True if the add-on read access to devicetree."""
|
||||
return self._mesh[ATTR_DEVICETREE]
|
||||
|
||||
@property
|
||||
def access_auth_api(self):
|
||||
"""Return True if the add-on access to login/auth backend."""
|
||||
return self._mesh[ATTR_AUTH_API]
|
||||
|
||||
@property
|
||||
def with_audio(self):
|
||||
"""Return True if the add-on access to audio."""
|
||||
return self._mesh[ATTR_AUDIO]
|
||||
|
||||
@property
|
||||
def audio_output(self):
|
||||
"""Return ALSA config for output or None."""
|
||||
if not self.with_audio:
|
||||
return None
|
||||
|
||||
if self.is_installed and \
|
||||
ATTR_AUDIO_OUTPUT in self._data.user[self._id]:
|
||||
return self._data.user[self._id][ATTR_AUDIO_OUTPUT]
|
||||
return self.sys_host.alsa.default.output
|
||||
|
||||
@audio_output.setter
|
||||
def audio_output(self, value):
|
||||
"""Set/reset audio output settings."""
|
||||
if value is None:
|
||||
self._data.user[self._id].pop(ATTR_AUDIO_OUTPUT, None)
|
||||
else:
|
||||
self._data.user[self._id][ATTR_AUDIO_OUTPUT] = value
|
||||
|
||||
@property
|
||||
def audio_input(self):
|
||||
"""Return ALSA config for input or None."""
|
||||
if not self.with_audio:
|
||||
return None
|
||||
|
||||
if self.is_installed and ATTR_AUDIO_INPUT in self._data.user[self._id]:
|
||||
return self._data.user[self._id][ATTR_AUDIO_INPUT]
|
||||
return self.sys_host.alsa.default.input
|
||||
|
||||
@audio_input.setter
|
||||
def audio_input(self, value):
|
||||
"""Set/reset audio input settings."""
|
||||
if value is None:
|
||||
self._data.user[self._id].pop(ATTR_AUDIO_INPUT, None)
|
||||
else:
|
||||
self._data.user[self._id][ATTR_AUDIO_INPUT] = value
|
||||
|
||||
@property
|
||||
def url(self):
|
||||
"""Return URL of add-on."""
|
||||
return self._mesh.get(ATTR_URL)
|
||||
|
||||
@property
|
||||
def with_icon(self):
|
||||
"""Return True if an icon exists."""
|
||||
return self.path_icon.exists()
|
||||
|
||||
@property
|
||||
def with_logo(self):
|
||||
"""Return True if a logo exists."""
|
||||
return self.path_logo.exists()
|
||||
|
||||
@property
|
||||
def with_changelog(self):
|
||||
"""Return True if a changelog exists."""
|
||||
return self.path_changelog.exists()
|
||||
|
||||
@property
|
||||
def supported_arch(self):
|
||||
"""Return list of supported arch."""
|
||||
return self._mesh[ATTR_ARCH]
|
||||
|
||||
@property
|
||||
def supported_machine(self):
|
||||
"""Return list of supported machine."""
|
||||
return self._mesh.get(ATTR_MACHINE) or MACHINE_ALL
|
||||
|
||||
@property
|
||||
def image(self):
|
||||
"""Return image name of add-on."""
|
||||
if self.is_installed:
|
||||
return self._data.user[self._id].get(ATTR_IMAGE)
|
||||
return self.image_next
|
||||
|
||||
@property
|
||||
def image_next(self):
|
||||
"""Return image name for install/update."""
|
||||
if self.is_detached:
|
||||
addon_data = self._data.system.get(self._id)
|
||||
else:
|
||||
addon_data = self._data.cache.get(self._id)
|
||||
return self._get_image(addon_data)
|
||||
|
||||
def _get_image(self, addon_data) -> str:
|
||||
"""Generate image name from data."""
|
||||
# Repository with Dockerhub images
|
||||
if ATTR_IMAGE in addon_data:
|
||||
arch = self.sys_arch.match(addon_data[ATTR_ARCH])
|
||||
return addon_data[ATTR_IMAGE].format(arch=arch)
|
||||
|
||||
# local build
|
||||
return (f"{addon_data[ATTR_REPOSITORY]}/"
|
||||
f"{self.sys_arch.default}-"
|
||||
f"addon-{addon_data[ATTR_SLUG]}")
|
||||
|
||||
@property
|
||||
def need_build(self):
|
||||
"""Return True if this add-on need a local build."""
|
||||
if self.is_detached:
|
||||
return ATTR_IMAGE not in self._data.system.get(self._id)
|
||||
return ATTR_IMAGE not in self._data.cache.get(self._id)
|
||||
|
||||
@property
|
||||
def map_volumes(self):
|
||||
"""Return a dict of {volume: policy} from add-on."""
|
||||
volumes = {}
|
||||
for volume in self._mesh[ATTR_MAP]:
|
||||
result = RE_VOLUME.match(volume)
|
||||
volumes[result.group(1)] = result.group(2) or 'ro'
|
||||
|
||||
return volumes
|
||||
|
||||
@property
|
||||
def path_data(self):
|
||||
"""Return add-on data path inside Supervisor."""
|
||||
return Path(self.sys_config.path_addons_data, self._id)
|
||||
|
||||
@property
|
||||
def path_extern_data(self):
|
||||
"""Return add-on data path external for Docker."""
|
||||
return PurePath(self.sys_config.path_extern_addons_data, self._id)
|
||||
|
||||
@property
|
||||
def path_options(self):
|
||||
"""Return path to add-on options."""
|
||||
return Path(self.path_data, "options.json")
|
||||
|
||||
@property
|
||||
def path_location(self):
|
||||
"""Return path to this add-on."""
|
||||
return Path(self._mesh[ATTR_LOCATON])
|
||||
|
||||
@property
|
||||
def path_icon(self):
|
||||
"""Return path to add-on icon."""
|
||||
return Path(self.path_location, 'icon.png')
|
||||
|
||||
@property
|
||||
def path_logo(self):
|
||||
"""Return path to add-on logo."""
|
||||
return Path(self.path_location, 'logo.png')
|
||||
|
||||
@property
|
||||
def path_changelog(self):
|
||||
"""Return path to add-on changelog."""
|
||||
return Path(self.path_location, 'CHANGELOG.md')
|
||||
|
||||
@property
|
||||
def path_apparmor(self):
|
||||
"""Return path to custom AppArmor profile."""
|
||||
return Path(self.path_location, 'apparmor.txt')
|
||||
|
||||
@property
|
||||
def path_asound(self):
|
||||
"""Return path to asound config."""
|
||||
return Path(self.sys_config.path_tmp, f"{self.slug}_asound")
|
||||
|
||||
@property
|
||||
def path_extern_asound(self):
|
||||
"""Return path to asound config for Docker."""
|
||||
return Path(self.sys_config.path_extern_tmp, f"{self.slug}_asound")
|
||||
|
||||
def save_data(self):
|
||||
"""Save data of add-on."""
|
||||
self.sys_addons.data.save_data()
|
||||
|
||||
def write_options(self):
|
||||
"""Return True if add-on options is written to data."""
|
||||
schema = self.schema
|
||||
options = self.options
|
||||
|
||||
try:
|
||||
schema(options)
|
||||
write_json_file(self.path_options, options)
|
||||
except vol.Invalid as ex:
|
||||
_LOGGER.error("Add-on %s have wrong options: %s", self._id,
|
||||
humanize_error(options, ex))
|
||||
except JsonFileError:
|
||||
_LOGGER.error("Add-on %s can't write options", self._id)
|
||||
else:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
def remove_discovery(self):
|
||||
"""Remove all discovery message from add-on."""
|
||||
for message in self.sys_discovery.list_messages:
|
||||
if message.addon != self.slug:
|
||||
continue
|
||||
self.sys_discovery.remove(message)
|
||||
|
||||
def write_asound(self):
|
||||
"""Write asound config to file and return True on success."""
|
||||
asound_config = self.sys_host.alsa.asound(
|
||||
alsa_input=self.audio_input, alsa_output=self.audio_output)
|
||||
|
||||
try:
|
||||
with self.path_asound.open('w') as config_file:
|
||||
config_file.write(asound_config)
|
||||
except OSError as err:
|
||||
_LOGGER.error("Add-on %s can't write asound: %s", self._id, err)
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
async def _install_apparmor(self):
|
||||
"""Install or Update AppArmor profile for Add-on."""
|
||||
exists_local = self.sys_host.apparmor.exists(self.slug)
|
||||
exists_addon = self.path_apparmor.exists()
|
||||
|
||||
# Nothing to do
|
||||
if not exists_local and not exists_addon:
|
||||
return
|
||||
|
||||
# Need removed
|
||||
if exists_local and not exists_addon:
|
||||
await self.sys_host.apparmor.remove_profile(self.slug)
|
||||
return
|
||||
|
||||
# Need install/update
|
||||
with TemporaryDirectory(dir=self.sys_config.path_tmp) as tmp_folder:
|
||||
profile_file = Path(tmp_folder, 'apparmor.txt')
|
||||
|
||||
adjust_profile(self.slug, self.path_apparmor, profile_file)
|
||||
await self.sys_host.apparmor.load_profile(self.slug, profile_file)
|
||||
|
||||
@property
|
||||
def schema(self):
|
||||
"""Create a schema for add-on options."""
|
||||
raw_schema = self._mesh[ATTR_SCHEMA]
|
||||
|
||||
if isinstance(raw_schema, bool):
|
||||
return vol.Schema(dict)
|
||||
return vol.Schema(vol.All(dict, validate_options(raw_schema)))
|
||||
|
||||
def test_update_schema(self):
|
||||
"""Check if the existing configuration is valid after update."""
|
||||
if not self.is_installed or self.is_detached:
|
||||
return True
|
||||
|
||||
# load next schema
|
||||
new_raw_schema = self._data.cache[self._id][ATTR_SCHEMA]
|
||||
default_options = self._data.cache[self._id][ATTR_OPTIONS]
|
||||
|
||||
# if disabled
|
||||
if isinstance(new_raw_schema, bool):
|
||||
return True
|
||||
|
||||
# merge options
|
||||
options = {
|
||||
**self._data.user[self._id][ATTR_OPTIONS],
|
||||
**default_options,
|
||||
}
|
||||
|
||||
# create voluptuous
|
||||
new_schema = \
|
||||
vol.Schema(vol.All(dict, validate_options(new_raw_schema)))
|
||||
|
||||
# validate
|
||||
try:
|
||||
new_schema(options)
|
||||
except vol.Invalid:
|
||||
return False
|
||||
return True
|
||||
|
||||
async def install(self):
|
||||
"""Install an add-on."""
|
||||
if not self.available:
|
||||
_LOGGER.error(
|
||||
"Add-on %s not supported on %s with %s architecture",
|
||||
self._id, self.sys_machine, self.sys_arch.supported)
|
||||
return False
|
||||
|
||||
if self.is_installed:
|
||||
_LOGGER.error("Add-on %s is already installed", self._id)
|
||||
return False
|
||||
|
||||
if not self.path_data.is_dir():
|
||||
_LOGGER.info(
|
||||
"Create Home Assistant add-on data folder %s", self.path_data)
|
||||
self.path_data.mkdir()
|
||||
|
||||
# Setup/Fix AppArmor profile
|
||||
await self._install_apparmor()
|
||||
|
||||
if not await self.instance.install(
|
||||
self.last_version, self.image_next):
|
||||
return False
|
||||
|
||||
self._set_install(self.image_next, self.last_version)
|
||||
return True
|
||||
|
||||
@check_installed
|
||||
async def uninstall(self):
|
||||
"""Remove an add-on."""
|
||||
if not await self.instance.remove():
|
||||
return False
|
||||
|
||||
if self.path_data.is_dir():
|
||||
_LOGGER.info(
|
||||
"Remove Home Assistant add-on data folder %s", self.path_data)
|
||||
await remove_data(self.path_data)
|
||||
|
||||
# Cleanup audio settings
|
||||
if self.path_asound.exists():
|
||||
with suppress(OSError):
|
||||
self.path_asound.unlink()
|
||||
|
||||
# Cleanup AppArmor profile
|
||||
if self.sys_host.apparmor.exists(self.slug):
|
||||
with suppress(HostAppArmorError):
|
||||
await self.sys_host.apparmor.remove_profile(self.slug)
|
||||
|
||||
# Remove discovery messages
|
||||
self.remove_discovery()
|
||||
|
||||
self._set_uninstall()
|
||||
return True
|
||||
|
||||
async def state(self):
|
||||
"""Return running state of add-on."""
|
||||
if not self.is_installed:
|
||||
return STATE_NONE
|
||||
|
||||
if await self.instance.is_running():
|
||||
return STATE_STARTED
|
||||
return STATE_STOPPED
|
||||
|
||||
@check_installed
|
||||
async def start(self):
|
||||
"""Set options and start add-on."""
|
||||
if await self.instance.is_running():
|
||||
_LOGGER.warning("%s already running!", self.slug)
|
||||
return
|
||||
|
||||
# Access Token
|
||||
self._data.user[self._id][ATTR_ACCESS_TOKEN] = create_token()
|
||||
self.save_data()
|
||||
|
||||
# Options
|
||||
if not self.write_options():
|
||||
return False
|
||||
|
||||
# Sound
|
||||
if self.with_audio and not self.write_asound():
|
||||
return False
|
||||
|
||||
return await self.instance.run()
|
||||
|
||||
@check_installed
|
||||
def stop(self):
|
||||
"""Stop add-on.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.stop()
|
||||
|
||||
@check_installed
|
||||
async def update(self):
|
||||
"""Update add-on."""
|
||||
last_state = await self.state()
|
||||
|
||||
if self.last_version == self.version_installed:
|
||||
_LOGGER.warning("No update available for add-on %s", self._id)
|
||||
return False
|
||||
|
||||
if not await self.instance.update(
|
||||
self.last_version, self.image_next):
|
||||
return False
|
||||
self._set_update(self.image_next, self.last_version)
|
||||
|
||||
# Setup/Fix AppArmor profile
|
||||
await self._install_apparmor()
|
||||
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
await self.start()
|
||||
return True
|
||||
|
||||
@check_installed
|
||||
async def restart(self):
|
||||
"""Restart add-on."""
|
||||
await self.stop()
|
||||
return await self.start()
|
||||
|
||||
@check_installed
|
||||
def logs(self):
|
||||
"""Return add-ons log output.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.logs()
|
||||
|
||||
@check_installed
|
||||
def stats(self):
|
||||
"""Return stats of container.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
return self.instance.stats()
|
||||
|
||||
@check_installed
|
||||
async def rebuild(self):
|
||||
"""Perform a rebuild of local build add-on."""
|
||||
last_state = await self.state()
|
||||
|
||||
if not self.need_build:
|
||||
_LOGGER.error("Can't rebuild a none local build add-on!")
|
||||
return False
|
||||
|
||||
# remove docker container but not addon config
|
||||
if not await self.instance.remove():
|
||||
return False
|
||||
|
||||
if not await self.instance.install(self.version_installed):
|
||||
return False
|
||||
|
||||
# restore state
|
||||
if last_state == STATE_STARTED:
|
||||
await self.start()
|
||||
return True
|
||||
|
||||
@check_installed
|
||||
async def write_stdin(self, data):
|
||||
"""Write data to add-on stdin.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
if not self.with_stdin:
|
||||
_LOGGER.error("Add-on don't support write to stdin!")
|
||||
return False
|
||||
|
||||
return await self.instance.write_stdin(data)
|
||||
|
||||
@check_installed
|
||||
async def snapshot(self, tar_file):
|
||||
"""Snapshot state of an add-on."""
|
||||
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
|
||||
# store local image
|
||||
if self.need_build and not await \
|
||||
self.instance.export_image(Path(temp, 'image.tar')):
|
||||
return False
|
||||
|
||||
data = {
|
||||
ATTR_USER: self._data.user.get(self._id, {}),
|
||||
ATTR_SYSTEM: self._data.system.get(self._id, {}),
|
||||
ATTR_VERSION: self.version_installed,
|
||||
ATTR_STATE: await self.state(),
|
||||
}
|
||||
|
||||
# Store local configs/state
|
||||
try:
|
||||
write_json_file(Path(temp, 'addon.json'), data)
|
||||
except JsonFileError:
|
||||
_LOGGER.error("Can't save meta for %s", self._id)
|
||||
return False
|
||||
|
||||
# Store AppArmor Profile
|
||||
if self.sys_host.apparmor.exists(self.slug):
|
||||
profile = Path(temp, 'apparmor.txt')
|
||||
try:
|
||||
self.sys_host.apparmor.backup_profile(self.slug, profile)
|
||||
except HostAppArmorError:
|
||||
_LOGGER.error("Can't backup AppArmor profile")
|
||||
return False
|
||||
|
||||
# write into tarfile
|
||||
def _write_tarfile():
|
||||
"""Write tar inside loop."""
|
||||
with tar_file as snapshot:
|
||||
snapshot.add(temp, arcname=".")
|
||||
snapshot.add(self.path_data, arcname="data")
|
||||
|
||||
try:
|
||||
_LOGGER.info("Build snapshot for add-on %s", self._id)
|
||||
await self.sys_run_in_executor(_write_tarfile)
|
||||
except (tarfile.TarError, OSError) as err:
|
||||
_LOGGER.error("Can't write tarfile %s: %s", tar_file, err)
|
||||
return False
|
||||
|
||||
_LOGGER.info("Finish snapshot for addon %s", self._id)
|
||||
return True
|
||||
|
||||
async def restore(self, tar_file):
|
||||
"""Restore state of an add-on."""
|
||||
with TemporaryDirectory(dir=str(self.sys_config.path_tmp)) as temp:
|
||||
# extract snapshot
|
||||
def _extract_tarfile():
|
||||
"""Extract tar snapshot."""
|
||||
with tar_file as snapshot:
|
||||
snapshot.extractall(path=Path(temp))
|
||||
|
||||
try:
|
||||
await self.sys_run_in_executor(_extract_tarfile)
|
||||
except tarfile.TarError as err:
|
||||
_LOGGER.error("Can't read tarfile %s: %s", tar_file, err)
|
||||
return False
|
||||
|
||||
# Read snapshot data
|
||||
try:
|
||||
data = read_json_file(Path(temp, 'addon.json'))
|
||||
except JsonFileError:
|
||||
return False
|
||||
|
||||
# Validate
|
||||
try:
|
||||
data = SCHEMA_ADDON_SNAPSHOT(data)
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error("Can't validate %s, snapshot data: %s",
|
||||
self._id, humanize_error(data, err))
|
||||
return False
|
||||
|
||||
# Restore data or reload add-on
|
||||
_LOGGER.info("Restore config for addon %s", self._id)
|
||||
restore_image = self._get_image(data[ATTR_SYSTEM])
|
||||
self._restore_data(data[ATTR_USER], data[ATTR_SYSTEM], restore_image)
|
||||
|
||||
# Check version / restore image
|
||||
version = data[ATTR_VERSION]
|
||||
if not await self.instance.exists():
|
||||
_LOGGER.info("Restore image for addon %s", self._id)
|
||||
|
||||
image_file = Path(temp, 'image.tar')
|
||||
if image_file.is_file():
|
||||
await self.instance.import_image(image_file, version)
|
||||
else:
|
||||
if await self.instance.install(version, restore_image):
|
||||
await self.instance.cleanup()
|
||||
else:
|
||||
await self.instance.stop()
|
||||
|
||||
# Restore data
|
||||
def _restore_data():
|
||||
"""Restore data."""
|
||||
shutil.copytree(str(Path(temp, "data")), str(self.path_data))
|
||||
|
||||
_LOGGER.info("Restore data for addon %s", self._id)
|
||||
if self.path_data.is_dir():
|
||||
await remove_data(self.path_data)
|
||||
try:
|
||||
await self.sys_run_in_executor(_restore_data)
|
||||
except shutil.Error as err:
|
||||
_LOGGER.error("Can't restore origin data: %s", err)
|
||||
return False
|
||||
|
||||
# Restore AppArmor
|
||||
profile_file = Path(temp, 'apparmor.txt')
|
||||
if profile_file.exists():
|
||||
try:
|
||||
await self.sys_host.apparmor.load_profile(
|
||||
self.slug, profile_file)
|
||||
except HostAppArmorError:
|
||||
_LOGGER.error("Can't restore AppArmor profile")
|
||||
return False
|
||||
|
||||
# Run add-on
|
||||
if data[ATTR_STATE] == STATE_STARTED:
|
||||
return await self.start()
|
||||
|
||||
_LOGGER.info("Finish restore for add-on %s", self._id)
|
||||
return True
|
@@ -1,311 +0,0 @@
|
||||
"""Validate add-ons options schema."""
|
||||
import logging
|
||||
import re
|
||||
import uuid
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ARCH_ALL, ATTR_ACCESS_TOKEN, ATTR_APPARMOR, ATTR_ARCH, ATTR_ARGS,
|
||||
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_AUTH_API,
|
||||
ATTR_AUTO_UART, ATTR_AUTO_UPDATE, ATTR_BOOT, ATTR_BUILD_FROM,
|
||||
ATTR_DESCRIPTON, ATTR_DEVICES, ATTR_DEVICETREE, ATTR_DISCOVERY,
|
||||
ATTR_DOCKER_API, ATTR_ENVIRONMENT, ATTR_FULL_ACCESS, ATTR_GPIO,
|
||||
ATTR_HASSIO_API, ATTR_HASSIO_ROLE, ATTR_HOMEASSISTANT_API, ATTR_HOST_DBUS,
|
||||
ATTR_HOST_IPC, ATTR_HOST_NETWORK, ATTR_HOST_PID, ATTR_IMAGE,
|
||||
ATTR_KERNEL_MODULES, ATTR_LEGACY, ATTR_LOCATON, ATTR_MACHINE,
|
||||
ATTR_MAINTAINER, ATTR_MAP, ATTR_NAME, ATTR_NETWORK, ATTR_OPTIONS,
|
||||
ATTR_PORTS, ATTR_PRIVILEGED, ATTR_PROTECTED, ATTR_REPOSITORY, ATTR_SCHEMA,
|
||||
ATTR_SERVICES, ATTR_SLUG, ATTR_SQUASH, ATTR_STARTUP, ATTR_STATE,
|
||||
ATTR_STDIN, ATTR_SYSTEM, ATTR_TIMEOUT, ATTR_TMPFS, ATTR_URL, ATTR_USER,
|
||||
ATTR_UUID, ATTR_VERSION, ATTR_WEBUI, BOOT_AUTO, BOOT_MANUAL,
|
||||
PRIVILEGED_ALL, ROLE_ALL, ROLE_DEFAULT, STARTUP_ALL, STARTUP_APPLICATION,
|
||||
STARTUP_SERVICES, STATE_STARTED, STATE_STOPPED)
|
||||
from ..services.validate import DISCOVERY_SERVICES
|
||||
from ..validate import (
|
||||
ALSA_DEVICE, DOCKER_PORTS, NETWORK_PORT, SHA256, UUID_MATCH)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
RE_VOLUME = re.compile(r"^(config|ssl|addons|backup|share)(?::(rw|ro))?$")
|
||||
RE_SERVICE = re.compile(r"^(?P<service>mqtt):(?P<rights>provide|want|need)$")
|
||||
|
||||
V_STR = 'str'
|
||||
V_INT = 'int'
|
||||
V_FLOAT = 'float'
|
||||
V_BOOL = 'bool'
|
||||
V_EMAIL = 'email'
|
||||
V_URL = 'url'
|
||||
V_PORT = 'port'
|
||||
V_MATCH = 'match'
|
||||
|
||||
RE_SCHEMA_ELEMENT = re.compile(
|
||||
r"^(?:"
|
||||
r"|str|bool|email|url|port"
|
||||
r"|int(?:\((?P<i_min>\d+)?,(?P<i_max>\d+)?\))?"
|
||||
r"|float(?:\((?P<f_min>[\d\.]+)?,(?P<f_max>[\d\.]+)?\))?"
|
||||
r"|match\((?P<match>.*)\)"
|
||||
r")\??$"
|
||||
)
|
||||
|
||||
RE_DOCKER_IMAGE = re.compile(
|
||||
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)$")
|
||||
RE_DOCKER_IMAGE_BUILD = re.compile(
|
||||
r"^([a-zA-Z\-\.:\d{}]+/)*?([\-\w{}]+)/([\-\w{}]+)(:[\.\-\w{}]+)?$")
|
||||
|
||||
SCHEMA_ELEMENT = vol.Match(RE_SCHEMA_ELEMENT)
|
||||
|
||||
|
||||
MACHINE_ALL = [
|
||||
'intel-nuc', 'odroid-c2', 'odroid-xu', 'orangepi-prime', 'qemux86',
|
||||
'qemux86-64', 'qemuarm', 'qemuarm-64', 'raspberrypi', 'raspberrypi2',
|
||||
'raspberrypi3', 'raspberrypi3-64', 'tinker',
|
||||
]
|
||||
|
||||
|
||||
def _simple_startup(value):
|
||||
"""Simple startup schema."""
|
||||
if value == "before":
|
||||
return STARTUP_SERVICES
|
||||
if value == "after":
|
||||
return STARTUP_APPLICATION
|
||||
return value
|
||||
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_ADDON_CONFIG = vol.Schema({
|
||||
vol.Required(ATTR_NAME): vol.Coerce(str),
|
||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||
vol.Required(ATTR_SLUG): vol.Coerce(str),
|
||||
vol.Required(ATTR_DESCRIPTON): vol.Coerce(str),
|
||||
vol.Optional(ATTR_URL): vol.Url(),
|
||||
vol.Optional(ATTR_ARCH, default=ARCH_ALL): [vol.In(ARCH_ALL)],
|
||||
vol.Optional(ATTR_MACHINE): [vol.In(MACHINE_ALL)],
|
||||
vol.Required(ATTR_STARTUP):
|
||||
vol.All(_simple_startup, vol.In(STARTUP_ALL)),
|
||||
vol.Required(ATTR_BOOT):
|
||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_PORTS): DOCKER_PORTS,
|
||||
vol.Optional(ATTR_WEBUI):
|
||||
vol.Match(r"^(?:https?|\[PROTO:\w+\]):\/\/\[HOST\]:\[PORT:\d+\].*$"),
|
||||
vol.Optional(ATTR_HOST_NETWORK, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HOST_PID, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HOST_IPC, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HOST_DBUS, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_DEVICES): [vol.Match(r"^(.*):(.*):([rwm]{1,3})$")],
|
||||
vol.Optional(ATTR_AUTO_UART, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_TMPFS):
|
||||
vol.Match(r"^size=(\d)*[kmg](,uid=\d{1,4})?(,rw)?$"),
|
||||
vol.Optional(ATTR_MAP, default=list): [vol.Match(RE_VOLUME)],
|
||||
vol.Optional(ATTR_ENVIRONMENT): {vol.Match(r"\w*"): vol.Coerce(str)},
|
||||
vol.Optional(ATTR_PRIVILEGED): [vol.In(PRIVILEGED_ALL)],
|
||||
vol.Optional(ATTR_APPARMOR, default=True): vol.Boolean(),
|
||||
vol.Optional(ATTR_FULL_ACCESS, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_AUDIO, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_GPIO, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_DEVICETREE, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_KERNEL_MODULES, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HASSIO_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_HASSIO_ROLE, default=ROLE_DEFAULT): vol.In(ROLE_ALL),
|
||||
vol.Optional(ATTR_HOMEASSISTANT_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_STDIN, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_LEGACY, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_DOCKER_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_AUTH_API, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_SERVICES): [vol.Match(RE_SERVICE)],
|
||||
vol.Optional(ATTR_DISCOVERY): [vol.In(DISCOVERY_SERVICES)],
|
||||
vol.Required(ATTR_OPTIONS): dict,
|
||||
vol.Required(ATTR_SCHEMA): vol.Any(vol.Schema({
|
||||
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [
|
||||
vol.Any(
|
||||
SCHEMA_ELEMENT,
|
||||
{vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])}
|
||||
),
|
||||
], vol.Schema({
|
||||
vol.Coerce(str): vol.Any(SCHEMA_ELEMENT, [SCHEMA_ELEMENT])
|
||||
}))
|
||||
}), False),
|
||||
vol.Optional(ATTR_IMAGE):
|
||||
vol.Match(RE_DOCKER_IMAGE),
|
||||
vol.Optional(ATTR_TIMEOUT, default=10):
|
||||
vol.All(vol.Coerce(int), vol.Range(min=10, max=120)),
|
||||
}, extra=vol.REMOVE_EXTRA)
|
||||
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_REPOSITORY_CONFIG = vol.Schema({
|
||||
vol.Required(ATTR_NAME): vol.Coerce(str),
|
||||
vol.Optional(ATTR_URL): vol.Url(),
|
||||
vol.Optional(ATTR_MAINTAINER): vol.Coerce(str),
|
||||
}, extra=vol.REMOVE_EXTRA)
|
||||
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_BUILD_CONFIG = vol.Schema({
|
||||
vol.Optional(ATTR_BUILD_FROM, default=dict): vol.Schema({
|
||||
vol.In(ARCH_ALL): vol.Match(RE_DOCKER_IMAGE_BUILD),
|
||||
}),
|
||||
vol.Optional(ATTR_SQUASH, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_ARGS, default=dict): vol.Schema({
|
||||
vol.Coerce(str): vol.Coerce(str)
|
||||
}),
|
||||
}, extra=vol.REMOVE_EXTRA)
|
||||
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_ADDON_USER = vol.Schema({
|
||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||
vol.Optional(ATTR_IMAGE): vol.Coerce(str),
|
||||
vol.Optional(ATTR_UUID, default=lambda: uuid.uuid4().hex): UUID_MATCH,
|
||||
vol.Optional(ATTR_ACCESS_TOKEN): SHA256,
|
||||
vol.Optional(ATTR_OPTIONS, default=dict): dict,
|
||||
vol.Optional(ATTR_AUTO_UPDATE, default=False): vol.Boolean(),
|
||||
vol.Optional(ATTR_BOOT):
|
||||
vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_NETWORK): DOCKER_PORTS,
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
|
||||
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
|
||||
vol.Optional(ATTR_PROTECTED, default=True): vol.Boolean(),
|
||||
}, extra=vol.REMOVE_EXTRA)
|
||||
|
||||
|
||||
SCHEMA_ADDON_SYSTEM = SCHEMA_ADDON_CONFIG.extend({
|
||||
vol.Required(ATTR_LOCATON): vol.Coerce(str),
|
||||
vol.Required(ATTR_REPOSITORY): vol.Coerce(str),
|
||||
})
|
||||
|
||||
|
||||
SCHEMA_ADDONS_FILE = vol.Schema({
|
||||
vol.Optional(ATTR_USER, default=dict): {
|
||||
vol.Coerce(str): SCHEMA_ADDON_USER,
|
||||
},
|
||||
vol.Optional(ATTR_SYSTEM, default=dict): {
|
||||
vol.Coerce(str): SCHEMA_ADDON_SYSTEM,
|
||||
}
|
||||
})
|
||||
|
||||
|
||||
SCHEMA_ADDON_SNAPSHOT = vol.Schema({
|
||||
vol.Required(ATTR_USER): SCHEMA_ADDON_USER,
|
||||
vol.Required(ATTR_SYSTEM): SCHEMA_ADDON_SYSTEM,
|
||||
vol.Required(ATTR_STATE): vol.In([STATE_STARTED, STATE_STOPPED]),
|
||||
vol.Required(ATTR_VERSION): vol.Coerce(str),
|
||||
}, extra=vol.REMOVE_EXTRA)
|
||||
|
||||
|
||||
def validate_options(raw_schema):
|
||||
"""Validate schema."""
|
||||
def validate(struct):
|
||||
"""Create schema validator for add-ons options."""
|
||||
options = {}
|
||||
|
||||
# read options
|
||||
for key, value in struct.items():
|
||||
# Ignore unknown options / remove from list
|
||||
if key not in raw_schema:
|
||||
_LOGGER.warning("Unknown options %s", key)
|
||||
continue
|
||||
|
||||
typ = raw_schema[key]
|
||||
try:
|
||||
if isinstance(typ, list):
|
||||
# nested value list
|
||||
options[key] = _nested_validate_list(typ[0], value, key)
|
||||
elif isinstance(typ, dict):
|
||||
# nested value dict
|
||||
options[key] = _nested_validate_dict(typ, value, key)
|
||||
else:
|
||||
# normal value
|
||||
options[key] = _single_validate(typ, value, key)
|
||||
except (IndexError, KeyError):
|
||||
raise vol.Invalid(f"Type error for {key}") from None
|
||||
|
||||
_check_missing_options(raw_schema, options, 'root')
|
||||
return options
|
||||
|
||||
return validate
|
||||
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
# pylint: disable=inconsistent-return-statements
|
||||
def _single_validate(typ, value, key):
|
||||
"""Validate a single element."""
|
||||
# if required argument
|
||||
if value is None:
|
||||
raise vol.Invalid(f"Missing required option '{key}'")
|
||||
|
||||
# parse extend data from type
|
||||
match = RE_SCHEMA_ELEMENT.match(typ)
|
||||
|
||||
# prepare range
|
||||
range_args = {}
|
||||
for group_name in ('i_min', 'i_max', 'f_min', 'f_max'):
|
||||
group_value = match.group(group_name)
|
||||
if group_value:
|
||||
range_args[group_name[2:]] = float(group_value)
|
||||
|
||||
if typ.startswith(V_STR):
|
||||
return str(value)
|
||||
elif typ.startswith(V_INT):
|
||||
return vol.All(vol.Coerce(int), vol.Range(**range_args))(value)
|
||||
elif typ.startswith(V_FLOAT):
|
||||
return vol.All(vol.Coerce(float), vol.Range(**range_args))(value)
|
||||
elif typ.startswith(V_BOOL):
|
||||
return vol.Boolean()(value)
|
||||
elif typ.startswith(V_EMAIL):
|
||||
return vol.Email()(value)
|
||||
elif typ.startswith(V_URL):
|
||||
return vol.Url()(value)
|
||||
elif typ.startswith(V_PORT):
|
||||
return NETWORK_PORT(value)
|
||||
elif typ.startswith(V_MATCH):
|
||||
return vol.Match(match.group('match'))(str(value))
|
||||
|
||||
raise vol.Invalid(f"Fatal error for {key} type {typ}")
|
||||
|
||||
|
||||
def _nested_validate_list(typ, data_list, key):
|
||||
"""Validate nested items."""
|
||||
options = []
|
||||
|
||||
for element in data_list:
|
||||
# Nested?
|
||||
if isinstance(typ, dict):
|
||||
c_options = _nested_validate_dict(typ, element, key)
|
||||
options.append(c_options)
|
||||
else:
|
||||
options.append(_single_validate(typ, element, key))
|
||||
|
||||
return options
|
||||
|
||||
|
||||
def _nested_validate_dict(typ, data_dict, key):
|
||||
"""Validate nested items."""
|
||||
options = {}
|
||||
|
||||
for c_key, c_value in data_dict.items():
|
||||
# Ignore unknown options / remove from list
|
||||
if c_key not in typ:
|
||||
_LOGGER.warning("Unknown options %s", c_key)
|
||||
continue
|
||||
|
||||
# Nested?
|
||||
if isinstance(typ[c_key], list):
|
||||
options[c_key] = _nested_validate_list(typ[c_key][0],
|
||||
c_value, c_key)
|
||||
else:
|
||||
options[c_key] = _single_validate(typ[c_key], c_value, c_key)
|
||||
|
||||
_check_missing_options(typ, options, key)
|
||||
return options
|
||||
|
||||
|
||||
def _check_missing_options(origin, exists, root):
|
||||
"""Check if all options are exists."""
|
||||
missing = set(origin) - set(exists)
|
||||
for miss_opt in missing:
|
||||
if isinstance(origin[miss_opt], str) and \
|
||||
origin[miss_opt].endswith("?"):
|
||||
continue
|
||||
raise vol.Invalid(f"Missing option {miss_opt} in {root}")
|
@@ -1,282 +0,0 @@
|
||||
"""Init file for Hass.io RESTful API."""
|
||||
import logging
|
||||
from pathlib import Path
|
||||
|
||||
from aiohttp import web
|
||||
|
||||
from .addons import APIAddons
|
||||
from .auth import APIAuth
|
||||
from .discovery import APIDiscovery
|
||||
from .homeassistant import APIHomeAssistant
|
||||
from .hardware import APIHardware
|
||||
from .host import APIHost
|
||||
from .hassos import APIHassOS
|
||||
from .info import APIInfo
|
||||
from .proxy import APIProxy
|
||||
from .supervisor import APISupervisor
|
||||
from .snapshots import APISnapshots
|
||||
from .services import APIServices
|
||||
from .security import SecurityMiddleware
|
||||
from ..coresys import CoreSysAttributes
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class RestAPI(CoreSysAttributes):
|
||||
"""Handle RESTful API for Hass.io."""
|
||||
|
||||
def __init__(self, coresys):
|
||||
"""Initialize Docker base wrapper."""
|
||||
self.coresys = coresys
|
||||
self.security = SecurityMiddleware(coresys)
|
||||
self.webapp = web.Application(
|
||||
middlewares=[self.security.token_validation])
|
||||
|
||||
# service stuff
|
||||
self._runner = web.AppRunner(self.webapp)
|
||||
self._site = None
|
||||
|
||||
async def load(self):
|
||||
"""Register REST API Calls."""
|
||||
self._register_supervisor()
|
||||
self._register_host()
|
||||
self._register_hassos()
|
||||
self._register_hardware()
|
||||
self._register_homeassistant()
|
||||
self._register_proxy()
|
||||
self._register_panel()
|
||||
self._register_addons()
|
||||
self._register_snapshots()
|
||||
self._register_discovery()
|
||||
self._register_services()
|
||||
self._register_info()
|
||||
self._register_auth()
|
||||
|
||||
def _register_host(self):
|
||||
"""Register hostcontrol functions."""
|
||||
api_host = APIHost()
|
||||
api_host.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.get('/host/info', api_host.info),
|
||||
web.post('/host/reboot', api_host.reboot),
|
||||
web.post('/host/shutdown', api_host.shutdown),
|
||||
web.post('/host/reload', api_host.reload),
|
||||
web.post('/host/options', api_host.options),
|
||||
web.get('/host/services', api_host.services),
|
||||
web.post('/host/services/{service}/stop', api_host.service_stop),
|
||||
web.post('/host/services/{service}/start', api_host.service_start),
|
||||
web.post('/host/services/{service}/restart',
|
||||
api_host.service_restart),
|
||||
web.post('/host/services/{service}/reload',
|
||||
api_host.service_reload),
|
||||
])
|
||||
|
||||
def _register_hassos(self):
|
||||
"""Register HassOS functions."""
|
||||
api_hassos = APIHassOS()
|
||||
api_hassos.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.get('/hassos/info', api_hassos.info),
|
||||
web.post('/hassos/update', api_hassos.update),
|
||||
web.post('/hassos/update/cli', api_hassos.update_cli),
|
||||
web.post('/hassos/config/sync', api_hassos.config_sync),
|
||||
])
|
||||
|
||||
def _register_hardware(self):
|
||||
"""Register hardware functions."""
|
||||
api_hardware = APIHardware()
|
||||
api_hardware.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.get('/hardware/info', api_hardware.info),
|
||||
web.get('/hardware/audio', api_hardware.audio),
|
||||
])
|
||||
|
||||
def _register_info(self):
|
||||
"""Register info functions."""
|
||||
api_info = APIInfo()
|
||||
api_info.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.get('/info', api_info.info),
|
||||
])
|
||||
|
||||
def _register_auth(self):
|
||||
"""Register auth functions."""
|
||||
api_auth = APIAuth()
|
||||
api_auth.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.post('/auth', api_auth.auth),
|
||||
])
|
||||
|
||||
def _register_supervisor(self):
|
||||
"""Register Supervisor functions."""
|
||||
api_supervisor = APISupervisor()
|
||||
api_supervisor.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.get('/supervisor/ping', api_supervisor.ping),
|
||||
web.get('/supervisor/info', api_supervisor.info),
|
||||
web.get('/supervisor/stats', api_supervisor.stats),
|
||||
web.get('/supervisor/logs', api_supervisor.logs),
|
||||
web.post('/supervisor/update', api_supervisor.update),
|
||||
web.post('/supervisor/reload', api_supervisor.reload),
|
||||
web.post('/supervisor/options', api_supervisor.options),
|
||||
])
|
||||
|
||||
def _register_homeassistant(self):
|
||||
"""Register Home Assistant functions."""
|
||||
api_hass = APIHomeAssistant()
|
||||
api_hass.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.get('/homeassistant/info', api_hass.info),
|
||||
web.get('/homeassistant/logs', api_hass.logs),
|
||||
web.get('/homeassistant/stats', api_hass.stats),
|
||||
web.post('/homeassistant/options', api_hass.options),
|
||||
web.post('/homeassistant/update', api_hass.update),
|
||||
web.post('/homeassistant/restart', api_hass.restart),
|
||||
web.post('/homeassistant/stop', api_hass.stop),
|
||||
web.post('/homeassistant/start', api_hass.start),
|
||||
web.post('/homeassistant/check', api_hass.check),
|
||||
])
|
||||
|
||||
def _register_proxy(self):
|
||||
"""Register Home Assistant API Proxy."""
|
||||
api_proxy = APIProxy()
|
||||
api_proxy.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.get('/homeassistant/api/websocket', api_proxy.websocket),
|
||||
web.get('/homeassistant/websocket', api_proxy.websocket),
|
||||
web.get('/homeassistant/api/stream', api_proxy.stream),
|
||||
web.post('/homeassistant/api/{path:.+}', api_proxy.api),
|
||||
web.get('/homeassistant/api/{path:.+}', api_proxy.api),
|
||||
web.get('/homeassistant/api/', api_proxy.api),
|
||||
])
|
||||
|
||||
def _register_addons(self):
|
||||
"""Register Add-on functions."""
|
||||
api_addons = APIAddons()
|
||||
api_addons.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.get('/addons', api_addons.list),
|
||||
web.post('/addons/reload', api_addons.reload),
|
||||
web.get('/addons/{addon}/info', api_addons.info),
|
||||
web.post('/addons/{addon}/install', api_addons.install),
|
||||
web.post('/addons/{addon}/uninstall', api_addons.uninstall),
|
||||
web.post('/addons/{addon}/start', api_addons.start),
|
||||
web.post('/addons/{addon}/stop', api_addons.stop),
|
||||
web.post('/addons/{addon}/restart', api_addons.restart),
|
||||
web.post('/addons/{addon}/update', api_addons.update),
|
||||
web.post('/addons/{addon}/options', api_addons.options),
|
||||
web.post('/addons/{addon}/rebuild', api_addons.rebuild),
|
||||
web.get('/addons/{addon}/logs', api_addons.logs),
|
||||
web.get('/addons/{addon}/icon', api_addons.icon),
|
||||
web.get('/addons/{addon}/logo', api_addons.logo),
|
||||
web.get('/addons/{addon}/changelog', api_addons.changelog),
|
||||
web.post('/addons/{addon}/stdin', api_addons.stdin),
|
||||
web.post('/addons/{addon}/security', api_addons.security),
|
||||
web.get('/addons/{addon}/stats', api_addons.stats),
|
||||
])
|
||||
|
||||
def _register_snapshots(self):
|
||||
"""Register snapshots functions."""
|
||||
api_snapshots = APISnapshots()
|
||||
api_snapshots.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.get('/snapshots', api_snapshots.list),
|
||||
web.post('/snapshots/reload', api_snapshots.reload),
|
||||
web.post('/snapshots/new/full', api_snapshots.snapshot_full),
|
||||
web.post('/snapshots/new/partial', api_snapshots.snapshot_partial),
|
||||
web.post('/snapshots/new/upload', api_snapshots.upload),
|
||||
web.get('/snapshots/{snapshot}/info', api_snapshots.info),
|
||||
web.post('/snapshots/{snapshot}/remove', api_snapshots.remove),
|
||||
web.post('/snapshots/{snapshot}/restore/full',
|
||||
api_snapshots.restore_full),
|
||||
web.post('/snapshots/{snapshot}/restore/partial',
|
||||
api_snapshots.restore_partial),
|
||||
web.get('/snapshots/{snapshot}/download', api_snapshots.download),
|
||||
])
|
||||
|
||||
def _register_services(self):
|
||||
"""Register services functions."""
|
||||
api_services = APIServices()
|
||||
api_services.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.get('/services', api_services.list),
|
||||
web.get('/services/{service}', api_services.get_service),
|
||||
web.post('/services/{service}', api_services.set_service),
|
||||
web.delete('/services/{service}', api_services.del_service),
|
||||
])
|
||||
|
||||
def _register_discovery(self):
|
||||
"""Register discovery functions."""
|
||||
api_discovery = APIDiscovery()
|
||||
api_discovery.coresys = self.coresys
|
||||
|
||||
self.webapp.add_routes([
|
||||
web.get('/discovery', api_discovery.list),
|
||||
web.get('/discovery/{uuid}', api_discovery.get_discovery),
|
||||
web.delete('/discovery/{uuid}', api_discovery.del_discovery),
|
||||
web.post('/discovery', api_discovery.set_discovery),
|
||||
])
|
||||
|
||||
def _register_panel(self):
|
||||
"""Register panel for Home Assistant."""
|
||||
panel_dir = Path(__file__).parent.joinpath("panel")
|
||||
|
||||
def create_response(panel_file):
|
||||
"""Create a function to generate a response."""
|
||||
path = panel_dir.joinpath(f"{panel_file!s}.html")
|
||||
return lambda request: web.FileResponse(path)
|
||||
|
||||
# This route is for backwards compatibility with HA < 0.58
|
||||
self.webapp.add_routes(
|
||||
[web.get('/panel', create_response('hassio-main-es5'))])
|
||||
|
||||
# This route is for backwards compatibility with HA 0.58 - 0.61
|
||||
self.webapp.add_routes([
|
||||
web.get('/panel_es5', create_response('hassio-main-es5')),
|
||||
web.get('/panel_latest', create_response('hassio-main-latest')),
|
||||
])
|
||||
|
||||
# This route is for backwards compatibility with HA 0.62 - 0.70
|
||||
self.webapp.add_routes([
|
||||
web.get('/app-es5/index.html', create_response('index')),
|
||||
web.get('/app-es5/hassio-app.html', create_response('hassio-app')),
|
||||
])
|
||||
|
||||
# This route is for HA > 0.70
|
||||
self.webapp.add_routes([web.static('/app', panel_dir)])
|
||||
|
||||
async def start(self):
|
||||
"""Run RESTful API webserver."""
|
||||
await self._runner.setup()
|
||||
self._site = web.TCPSite(
|
||||
self._runner, host="0.0.0.0", port=80, shutdown_timeout=5)
|
||||
|
||||
try:
|
||||
await self._site.start()
|
||||
except OSError as err:
|
||||
_LOGGER.fatal("Failed to create HTTP server at 0.0.0.0:80 -> %s",
|
||||
err)
|
||||
else:
|
||||
_LOGGER.info("Start API on %s", self.sys_docker.network.supervisor)
|
||||
|
||||
async def stop(self):
|
||||
"""Stop RESTful API webserver."""
|
||||
if not self._site:
|
||||
return
|
||||
|
||||
# Shutdown running API
|
||||
await self._site.stop()
|
||||
await self._runner.cleanup()
|
||||
|
||||
_LOGGER.info("Stop API on %s", self.sys_docker.network.supervisor)
|
@@ -1,342 +0,0 @@
|
||||
"""Init file for Hass.io Home Assistant RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
from voluptuous.humanize import humanize_error
|
||||
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
from ..addons.utils import rating_security
|
||||
from ..const import (
|
||||
ATTR_VERSION, ATTR_LAST_VERSION, ATTR_STATE, ATTR_BOOT, ATTR_OPTIONS,
|
||||
ATTR_URL, ATTR_DESCRIPTON, ATTR_DETACHED, ATTR_NAME, ATTR_REPOSITORY,
|
||||
ATTR_BUILD, ATTR_AUTO_UPDATE, ATTR_NETWORK, ATTR_HOST_NETWORK, ATTR_SLUG,
|
||||
ATTR_SOURCE, ATTR_REPOSITORIES, ATTR_ADDONS, ATTR_ARCH, ATTR_MAINTAINER,
|
||||
ATTR_INSTALLED, ATTR_LOGO, ATTR_WEBUI, ATTR_DEVICES, ATTR_PRIVILEGED,
|
||||
ATTR_AUDIO, ATTR_AUDIO_INPUT, ATTR_AUDIO_OUTPUT, ATTR_HASSIO_API,
|
||||
ATTR_GPIO, ATTR_HOMEASSISTANT_API, ATTR_STDIN, BOOT_AUTO, BOOT_MANUAL,
|
||||
ATTR_CHANGELOG, ATTR_HOST_IPC, ATTR_HOST_DBUS, ATTR_LONG_DESCRIPTION,
|
||||
ATTR_CPU_PERCENT, ATTR_MEMORY_LIMIT, ATTR_MEMORY_USAGE, ATTR_NETWORK_TX,
|
||||
ATTR_NETWORK_RX, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_ICON, ATTR_SERVICES,
|
||||
ATTR_DISCOVERY, ATTR_APPARMOR, ATTR_DEVICETREE, ATTR_DOCKER_API,
|
||||
ATTR_FULL_ACCESS, ATTR_PROTECTED, ATTR_RATING, ATTR_HOST_PID,
|
||||
ATTR_HASSIO_ROLE, ATTR_MACHINE, ATTR_AVAILABLE, ATTR_AUTH_API,
|
||||
ATTR_KERNEL_MODULES,
|
||||
CONTENT_TYPE_PNG, CONTENT_TYPE_BINARY, CONTENT_TYPE_TEXT, REQUEST_FROM)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..validate import DOCKER_PORTS, ALSA_DEVICE
|
||||
from ..exceptions import APIError
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_OPTIONS = vol.Schema({
|
||||
vol.Optional(ATTR_BOOT): vol.In([BOOT_AUTO, BOOT_MANUAL]),
|
||||
vol.Optional(ATTR_NETWORK): vol.Any(None, DOCKER_PORTS),
|
||||
vol.Optional(ATTR_AUTO_UPDATE): vol.Boolean(),
|
||||
vol.Optional(ATTR_AUDIO_OUTPUT): ALSA_DEVICE,
|
||||
vol.Optional(ATTR_AUDIO_INPUT): ALSA_DEVICE,
|
||||
})
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_SECURITY = vol.Schema({
|
||||
vol.Optional(ATTR_PROTECTED): vol.Boolean(),
|
||||
})
|
||||
|
||||
|
||||
class APIAddons(CoreSysAttributes):
|
||||
"""Handle RESTful API for add-on functions."""
|
||||
|
||||
def _extract_addon(self, request, check_installed=True):
|
||||
"""Return addon, throw an exception it it doesn't exist."""
|
||||
addon_slug = request.match_info.get('addon')
|
||||
|
||||
# Lookup itself
|
||||
if addon_slug == 'self':
|
||||
return request.get(REQUEST_FROM)
|
||||
|
||||
addon = self.sys_addons.get(addon_slug)
|
||||
if not addon:
|
||||
raise APIError("Addon does not exist")
|
||||
|
||||
if check_installed and not addon.is_installed:
|
||||
raise APIError("Addon is not installed")
|
||||
|
||||
return addon
|
||||
|
||||
@api_process
|
||||
async def list(self, request):
|
||||
"""Return all add-ons or repositories."""
|
||||
data_addons = []
|
||||
for addon in self.sys_addons.list_addons:
|
||||
data_addons.append({
|
||||
ATTR_NAME: addon.name,
|
||||
ATTR_SLUG: addon.slug,
|
||||
ATTR_DESCRIPTON: addon.description,
|
||||
ATTR_VERSION: addon.last_version,
|
||||
ATTR_INSTALLED: addon.version_installed,
|
||||
ATTR_AVAILABLE: addon.available,
|
||||
ATTR_DETACHED: addon.is_detached,
|
||||
ATTR_REPOSITORY: addon.repository,
|
||||
ATTR_BUILD: addon.need_build,
|
||||
ATTR_URL: addon.url,
|
||||
ATTR_ICON: addon.with_icon,
|
||||
ATTR_LOGO: addon.with_logo,
|
||||
})
|
||||
|
||||
data_repositories = []
|
||||
for repository in self.sys_addons.list_repositories:
|
||||
data_repositories.append({
|
||||
ATTR_SLUG: repository.slug,
|
||||
ATTR_NAME: repository.name,
|
||||
ATTR_SOURCE: repository.source,
|
||||
ATTR_URL: repository.url,
|
||||
ATTR_MAINTAINER: repository.maintainer,
|
||||
})
|
||||
|
||||
return {
|
||||
ATTR_ADDONS: data_addons,
|
||||
ATTR_REPOSITORIES: data_repositories,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def reload(self, request):
|
||||
"""Reload all add-on data."""
|
||||
await asyncio.shield(self.sys_addons.reload())
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return add-on information."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
|
||||
return {
|
||||
ATTR_NAME: addon.name,
|
||||
ATTR_SLUG: addon.slug,
|
||||
ATTR_DESCRIPTON: addon.description,
|
||||
ATTR_LONG_DESCRIPTION: addon.long_description,
|
||||
ATTR_VERSION: addon.version_installed,
|
||||
ATTR_AUTO_UPDATE: addon.auto_update,
|
||||
ATTR_REPOSITORY: addon.repository,
|
||||
ATTR_LAST_VERSION: addon.last_version,
|
||||
ATTR_STATE: await addon.state(),
|
||||
ATTR_PROTECTED: addon.protected,
|
||||
ATTR_RATING: rating_security(addon),
|
||||
ATTR_BOOT: addon.boot,
|
||||
ATTR_OPTIONS: addon.options,
|
||||
ATTR_ARCH: addon.supported_arch,
|
||||
ATTR_MACHINE: addon.supported_machine,
|
||||
ATTR_URL: addon.url,
|
||||
ATTR_DETACHED: addon.is_detached,
|
||||
ATTR_AVAILABLE: addon.available,
|
||||
ATTR_BUILD: addon.need_build,
|
||||
ATTR_NETWORK: addon.ports,
|
||||
ATTR_HOST_NETWORK: addon.host_network,
|
||||
ATTR_HOST_PID: addon.host_pid,
|
||||
ATTR_HOST_IPC: addon.host_ipc,
|
||||
ATTR_HOST_DBUS: addon.host_dbus,
|
||||
ATTR_PRIVILEGED: addon.privileged,
|
||||
ATTR_FULL_ACCESS: addon.with_full_access,
|
||||
ATTR_APPARMOR: addon.apparmor,
|
||||
ATTR_DEVICES: _pretty_devices(addon),
|
||||
ATTR_ICON: addon.with_icon,
|
||||
ATTR_LOGO: addon.with_logo,
|
||||
ATTR_CHANGELOG: addon.with_changelog,
|
||||
ATTR_WEBUI: addon.webui,
|
||||
ATTR_STDIN: addon.with_stdin,
|
||||
ATTR_HASSIO_API: addon.access_hassio_api,
|
||||
ATTR_HASSIO_ROLE: addon.hassio_role,
|
||||
ATTR_AUTH_API: addon.access_auth_api,
|
||||
ATTR_HOMEASSISTANT_API: addon.access_homeassistant_api,
|
||||
ATTR_GPIO: addon.with_gpio,
|
||||
ATTR_KERNEL_MODULES: addon.with_kernel_modules,
|
||||
ATTR_DEVICETREE: addon.with_devicetree,
|
||||
ATTR_DOCKER_API: addon.access_docker_api,
|
||||
ATTR_AUDIO: addon.with_audio,
|
||||
ATTR_AUDIO_INPUT: addon.audio_input,
|
||||
ATTR_AUDIO_OUTPUT: addon.audio_output,
|
||||
ATTR_SERVICES: _pretty_services(addon),
|
||||
ATTR_DISCOVERY: addon.discovery,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
"""Store user options for add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
addon_schema = SCHEMA_OPTIONS.extend({
|
||||
vol.Optional(ATTR_OPTIONS): vol.Any(None, addon.schema),
|
||||
})
|
||||
|
||||
body = await api_validate(addon_schema, request)
|
||||
|
||||
if ATTR_OPTIONS in body:
|
||||
addon.options = body[ATTR_OPTIONS]
|
||||
if ATTR_BOOT in body:
|
||||
addon.boot = body[ATTR_BOOT]
|
||||
if ATTR_AUTO_UPDATE in body:
|
||||
addon.auto_update = body[ATTR_AUTO_UPDATE]
|
||||
if ATTR_NETWORK in body:
|
||||
addon.ports = body[ATTR_NETWORK]
|
||||
if ATTR_AUDIO_INPUT in body:
|
||||
addon.audio_input = body[ATTR_AUDIO_INPUT]
|
||||
if ATTR_AUDIO_OUTPUT in body:
|
||||
addon.audio_output = body[ATTR_AUDIO_OUTPUT]
|
||||
|
||||
addon.save_data()
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def security(self, request):
|
||||
"""Store security options for add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
body = await api_validate(SCHEMA_SECURITY, request)
|
||||
|
||||
if ATTR_PROTECTED in body:
|
||||
_LOGGER.warning("Protected flag changing for %s!", addon.slug)
|
||||
addon.protected = body[ATTR_PROTECTED]
|
||||
|
||||
addon.save_data()
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def stats(self, request):
|
||||
"""Return resource information."""
|
||||
addon = self._extract_addon(request)
|
||||
stats = await addon.stats()
|
||||
|
||||
if not stats:
|
||||
raise APIError("No stats available")
|
||||
|
||||
return {
|
||||
ATTR_CPU_PERCENT: stats.cpu_percent,
|
||||
ATTR_MEMORY_USAGE: stats.memory_usage,
|
||||
ATTR_MEMORY_LIMIT: stats.memory_limit,
|
||||
ATTR_NETWORK_RX: stats.network_rx,
|
||||
ATTR_NETWORK_TX: stats.network_tx,
|
||||
ATTR_BLK_READ: stats.blk_read,
|
||||
ATTR_BLK_WRITE: stats.blk_write,
|
||||
}
|
||||
|
||||
@api_process
|
||||
def install(self, request):
|
||||
"""Install add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
return asyncio.shield(addon.install())
|
||||
|
||||
@api_process
|
||||
def uninstall(self, request):
|
||||
"""Uninstall add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return asyncio.shield(addon.uninstall())
|
||||
|
||||
@api_process
|
||||
def start(self, request):
|
||||
"""Start add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
# check options
|
||||
options = addon.options
|
||||
try:
|
||||
addon.schema(options)
|
||||
except vol.Invalid as ex:
|
||||
raise APIError(humanize_error(options, ex)) from None
|
||||
|
||||
return asyncio.shield(addon.start())
|
||||
|
||||
@api_process
|
||||
def stop(self, request):
|
||||
"""Stop add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return asyncio.shield(addon.stop())
|
||||
|
||||
@api_process
|
||||
def update(self, request):
|
||||
"""Update add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
|
||||
if addon.last_version == addon.version_installed:
|
||||
raise APIError("No update available!")
|
||||
|
||||
return asyncio.shield(addon.update())
|
||||
|
||||
@api_process
|
||||
def restart(self, request):
|
||||
"""Restart add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return asyncio.shield(addon.restart())
|
||||
|
||||
@api_process
|
||||
def rebuild(self, request):
|
||||
"""Rebuild local build add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
if not addon.need_build:
|
||||
raise APIError("Only local build addons are supported")
|
||||
|
||||
return asyncio.shield(addon.rebuild())
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request):
|
||||
"""Return logs from add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
return addon.logs()
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_PNG)
|
||||
async def icon(self, request):
|
||||
"""Return icon from add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
if not addon.with_icon:
|
||||
raise APIError("No icon found!")
|
||||
|
||||
with addon.path_icon.open('rb') as png:
|
||||
return png.read()
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_PNG)
|
||||
async def logo(self, request):
|
||||
"""Return logo from add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
if not addon.with_logo:
|
||||
raise APIError("No logo found!")
|
||||
|
||||
with addon.path_logo.open('rb') as png:
|
||||
return png.read()
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_TEXT)
|
||||
async def changelog(self, request):
|
||||
"""Return changelog from add-on."""
|
||||
addon = self._extract_addon(request, check_installed=False)
|
||||
if not addon.with_changelog:
|
||||
raise APIError("No changelog found!")
|
||||
|
||||
with addon.path_changelog.open('r') as changelog:
|
||||
return changelog.read()
|
||||
|
||||
@api_process
|
||||
async def stdin(self, request):
|
||||
"""Write to stdin of add-on."""
|
||||
addon = self._extract_addon(request)
|
||||
if not addon.with_stdin:
|
||||
raise APIError("STDIN not supported by add-on")
|
||||
|
||||
data = await request.read()
|
||||
return await asyncio.shield(addon.write_stdin(data))
|
||||
|
||||
|
||||
def _pretty_devices(addon):
|
||||
"""Return a simplified device list."""
|
||||
dev_list = addon.devices
|
||||
if not dev_list:
|
||||
return None
|
||||
return [row.split(':')[0] for row in dev_list]
|
||||
|
||||
|
||||
def _pretty_services(addon):
|
||||
"""Return a simplified services role list."""
|
||||
services = []
|
||||
for name, access in addon.services_role.items():
|
||||
services.append(f"{name}:{access}")
|
||||
return services
|
@@ -1,61 +0,0 @@
|
||||
"""Init file for Hass.io auth/SSO RESTful API."""
|
||||
import logging
|
||||
|
||||
from aiohttp import BasicAuth
|
||||
from aiohttp.web_exceptions import HTTPUnauthorized
|
||||
from aiohttp.hdrs import CONTENT_TYPE, AUTHORIZATION, WWW_AUTHENTICATE
|
||||
|
||||
from .utils import api_process
|
||||
from ..const import REQUEST_FROM, CONTENT_TYPE_JSON, CONTENT_TYPE_URL
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..exceptions import APIForbidden
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class APIAuth(CoreSysAttributes):
|
||||
"""Handle RESTful API for auth functions."""
|
||||
|
||||
def _process_basic(self, request, addon):
|
||||
"""Process login request with basic auth.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
auth = BasicAuth.decode(request.headers[AUTHORIZATION])
|
||||
return self.sys_auth.check_login(addon, auth.login, auth.password)
|
||||
|
||||
def _process_dict(self, request, addon, data):
|
||||
"""Process login with dict data.
|
||||
|
||||
Return a coroutine.
|
||||
"""
|
||||
username = data.get('username') or data.get('user')
|
||||
password = data.get('password')
|
||||
|
||||
return self.sys_auth.check_login(addon, username, password)
|
||||
|
||||
@api_process
|
||||
async def auth(self, request):
|
||||
"""Process login request."""
|
||||
addon = request[REQUEST_FROM]
|
||||
|
||||
if not addon.access_auth_api:
|
||||
raise APIForbidden("Can't use Home Assistant auth!")
|
||||
|
||||
# BasicAuth
|
||||
if AUTHORIZATION in request.headers:
|
||||
return await self._process_basic(request, addon)
|
||||
|
||||
# Json
|
||||
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_JSON:
|
||||
data = await request.json()
|
||||
return await self._process_dict(request, addon, data)
|
||||
|
||||
# URL encoded
|
||||
if request.headers.get(CONTENT_TYPE) == CONTENT_TYPE_URL:
|
||||
data = await request.post()
|
||||
return await self._process_dict(request, addon, data)
|
||||
|
||||
raise HTTPUnauthorized(headers={
|
||||
WWW_AUTHENTICATE: "Basic realm=\"Hass.io Authentication\""
|
||||
})
|
@@ -1,34 +0,0 @@
|
||||
"""Init file for Hass.io hardware RESTful API."""
|
||||
import logging
|
||||
|
||||
from .utils import api_process
|
||||
from ..const import (
|
||||
ATTR_SERIAL, ATTR_DISK, ATTR_GPIO, ATTR_AUDIO, ATTR_INPUT, ATTR_OUTPUT)
|
||||
from ..coresys import CoreSysAttributes
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class APIHardware(CoreSysAttributes):
|
||||
"""Handle RESTful API for hardware functions."""
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Show hardware info."""
|
||||
return {
|
||||
ATTR_SERIAL: list(self.sys_hardware.serial_devices),
|
||||
ATTR_INPUT: list(self.sys_hardware.input_devices),
|
||||
ATTR_DISK: list(self.sys_hardware.disk_devices),
|
||||
ATTR_GPIO: list(self.sys_hardware.gpio_devices),
|
||||
ATTR_AUDIO: self.sys_hardware.audio_devices,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def audio(self, request):
|
||||
"""Show ALSA audio devices."""
|
||||
return {
|
||||
ATTR_AUDIO: {
|
||||
ATTR_INPUT: self.sys_host.alsa.input_devices,
|
||||
ATTR_OUTPUT: self.sys_host.alsa.output_devices,
|
||||
}
|
||||
}
|
@@ -1,53 +0,0 @@
|
||||
"""Init file for Hass.io HassOS RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from .utils import api_process, api_validate
|
||||
from ..const import (
|
||||
ATTR_VERSION, ATTR_BOARD, ATTR_VERSION_LATEST, ATTR_VERSION_CLI,
|
||||
ATTR_VERSION_CLI_LATEST)
|
||||
from ..coresys import CoreSysAttributes
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
|
||||
|
||||
class APIHassOS(CoreSysAttributes):
|
||||
"""Handle RESTful API for HassOS functions."""
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return HassOS information."""
|
||||
return {
|
||||
ATTR_VERSION: self.sys_hassos.version,
|
||||
ATTR_VERSION_CLI: self.sys_hassos.version_cli,
|
||||
ATTR_VERSION_LATEST: self.sys_hassos.version_latest,
|
||||
ATTR_VERSION_CLI_LATEST: self.sys_hassos.version_cli_latest,
|
||||
ATTR_BOARD: self.sys_hassos.board,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
"""Update HassOS."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_hassos.version_latest)
|
||||
|
||||
await asyncio.shield(self.sys_hassos.update(version))
|
||||
|
||||
@api_process
|
||||
async def update_cli(self, request):
|
||||
"""Update HassOS CLI."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_hassos.version_cli_latest)
|
||||
|
||||
await asyncio.shield(self.sys_hassos.update_cli(version))
|
||||
|
||||
@api_process
|
||||
def config_sync(self, request):
|
||||
"""Trigger config reload on HassOS."""
|
||||
return asyncio.shield(self.sys_hassos.config_sync())
|
@@ -1,149 +0,0 @@
|
||||
"""Init file for Hass.io Home Assistant RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from ..const import (
|
||||
ATTR_ARCH, ATTR_BLK_READ, ATTR_BLK_WRITE, ATTR_BOOT, ATTR_CPU_PERCENT,
|
||||
ATTR_CUSTOM, ATTR_IMAGE, ATTR_LAST_VERSION, ATTR_MACHINE, ATTR_MEMORY_LIMIT,
|
||||
ATTR_MEMORY_USAGE, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_PASSWORD,
|
||||
ATTR_PORT, ATTR_REFRESH_TOKEN, ATTR_SSL, ATTR_VERSION, ATTR_WAIT_BOOT,
|
||||
ATTR_WATCHDOG, CONTENT_TYPE_BINARY)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..exceptions import APIError
|
||||
from ..validate import DOCKER_IMAGE, NETWORK_PORT
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# pylint: disable=no-value-for-parameter
|
||||
SCHEMA_OPTIONS = vol.Schema({
|
||||
vol.Optional(ATTR_BOOT):
|
||||
vol.Boolean(),
|
||||
vol.Inclusive(ATTR_IMAGE, 'custom_hass'):
|
||||
vol.Maybe(vol.Coerce(str)),
|
||||
vol.Inclusive(ATTR_LAST_VERSION, 'custom_hass'):
|
||||
vol.Any(None, DOCKER_IMAGE),
|
||||
vol.Optional(ATTR_PORT):
|
||||
NETWORK_PORT,
|
||||
vol.Optional(ATTR_PASSWORD):
|
||||
vol.Maybe(vol.Coerce(str)),
|
||||
vol.Optional(ATTR_SSL):
|
||||
vol.Boolean(),
|
||||
vol.Optional(ATTR_WATCHDOG):
|
||||
vol.Boolean(),
|
||||
vol.Optional(ATTR_WAIT_BOOT):
|
||||
vol.All(vol.Coerce(int), vol.Range(min=60)),
|
||||
vol.Optional(ATTR_REFRESH_TOKEN):
|
||||
vol.Maybe(vol.Coerce(str)),
|
||||
})
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
|
||||
|
||||
class APIHomeAssistant(CoreSysAttributes):
|
||||
"""Handle RESTful API for Home Assistant functions."""
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return host information."""
|
||||
return {
|
||||
ATTR_VERSION: self.sys_homeassistant.version,
|
||||
ATTR_LAST_VERSION: self.sys_homeassistant.last_version,
|
||||
ATTR_MACHINE: self.sys_homeassistant.machine,
|
||||
ATTR_ARCH: self.sys_homeassistant.arch,
|
||||
ATTR_IMAGE: self.sys_homeassistant.image,
|
||||
ATTR_CUSTOM: self.sys_homeassistant.is_custom_image,
|
||||
ATTR_BOOT: self.sys_homeassistant.boot,
|
||||
ATTR_PORT: self.sys_homeassistant.api_port,
|
||||
ATTR_SSL: self.sys_homeassistant.api_ssl,
|
||||
ATTR_WATCHDOG: self.sys_homeassistant.watchdog,
|
||||
ATTR_WAIT_BOOT: self.sys_homeassistant.wait_boot,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
"""Set Home Assistant options."""
|
||||
body = await api_validate(SCHEMA_OPTIONS, request)
|
||||
|
||||
if ATTR_IMAGE in body and ATTR_LAST_VERSION in body:
|
||||
self.sys_homeassistant.image = body[ATTR_IMAGE]
|
||||
self.sys_homeassistant.last_version = body[ATTR_LAST_VERSION]
|
||||
|
||||
if ATTR_BOOT in body:
|
||||
self.sys_homeassistant.boot = body[ATTR_BOOT]
|
||||
|
||||
if ATTR_PORT in body:
|
||||
self.sys_homeassistant.api_port = body[ATTR_PORT]
|
||||
|
||||
if ATTR_PASSWORD in body:
|
||||
self.sys_homeassistant.api_password = body[ATTR_PASSWORD]
|
||||
|
||||
if ATTR_SSL in body:
|
||||
self.sys_homeassistant.api_ssl = body[ATTR_SSL]
|
||||
|
||||
if ATTR_WATCHDOG in body:
|
||||
self.sys_homeassistant.watchdog = body[ATTR_WATCHDOG]
|
||||
|
||||
if ATTR_WAIT_BOOT in body:
|
||||
self.sys_homeassistant.wait_boot = body[ATTR_WAIT_BOOT]
|
||||
|
||||
if ATTR_REFRESH_TOKEN in body:
|
||||
self.sys_homeassistant.refresh_token = body[ATTR_REFRESH_TOKEN]
|
||||
|
||||
self.sys_homeassistant.save_data()
|
||||
|
||||
@api_process
|
||||
async def stats(self, request):
|
||||
"""Return resource information."""
|
||||
stats = await self.sys_homeassistant.stats()
|
||||
if not stats:
|
||||
raise APIError("No stats available")
|
||||
|
||||
return {
|
||||
ATTR_CPU_PERCENT: stats.cpu_percent,
|
||||
ATTR_MEMORY_USAGE: stats.memory_usage,
|
||||
ATTR_MEMORY_LIMIT: stats.memory_limit,
|
||||
ATTR_NETWORK_RX: stats.network_rx,
|
||||
ATTR_NETWORK_TX: stats.network_tx,
|
||||
ATTR_BLK_READ: stats.blk_read,
|
||||
ATTR_BLK_WRITE: stats.blk_write,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
"""Update Home Assistant."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_homeassistant.last_version)
|
||||
|
||||
await asyncio.shield(self.sys_homeassistant.update(version))
|
||||
|
||||
@api_process
|
||||
def stop(self, request):
|
||||
"""Stop Home Assistant."""
|
||||
return asyncio.shield(self.sys_homeassistant.stop())
|
||||
|
||||
@api_process
|
||||
def start(self, request):
|
||||
"""Start Home Assistant."""
|
||||
return asyncio.shield(self.sys_homeassistant.start())
|
||||
|
||||
@api_process
|
||||
def restart(self, request):
|
||||
"""Restart Home Assistant."""
|
||||
return asyncio.shield(self.sys_homeassistant.restart())
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request):
|
||||
"""Return Home Assistant Docker logs."""
|
||||
return self.sys_homeassistant.logs()
|
||||
|
||||
@api_process
|
||||
async def check(self, request):
|
||||
"""Check configuration of Home Assistant."""
|
||||
result = await self.sys_homeassistant.check_config()
|
||||
if not result.valid:
|
||||
raise APIError(result.log)
|
@@ -1,28 +0,0 @@
|
||||
"""Init file for Hass.io info RESTful API."""
|
||||
import logging
|
||||
|
||||
from ..const import (ATTR_ARCH, ATTR_CHANNEL, ATTR_HASSOS, ATTR_HOMEASSISTANT,
|
||||
ATTR_HOSTNAME, ATTR_MACHINE, ATTR_SUPERVISOR,
|
||||
ATTR_SUPPORTED_ARCH)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from .utils import api_process
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class APIInfo(CoreSysAttributes):
|
||||
"""Handle RESTful API for info functions."""
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Show system info."""
|
||||
return {
|
||||
ATTR_SUPERVISOR: self.sys_supervisor.version,
|
||||
ATTR_HOMEASSISTANT: self.sys_homeassistant.version,
|
||||
ATTR_HASSOS: self.sys_hassos.version,
|
||||
ATTR_HOSTNAME: self.sys_host.info.hostname,
|
||||
ATTR_MACHINE: self.sys_machine,
|
||||
ATTR_ARCH: self.sys_arch.default,
|
||||
ATTR_SUPPORTED_ARCH: self.sys_arch.supported,
|
||||
ATTR_CHANNEL: self.sys_updater.channel,
|
||||
}
|
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
|
||||
(window.webpackJsonp=window.webpackJsonp||[]).push([[4],{100:function(n,r,t){"use strict";t.r(r),t.d(r,"marked",function(){return a}),t.d(r,"filterXSS",function(){return c});var e=t(89),i=t.n(e),o=t(91),u=t.n(o),a=i.a,c=u.a}}]);
|
||||
//# sourceMappingURL=chunk.9e3883f96f68b3ce89f5.js.map
|
Binary file not shown.
@@ -1 +0,0 @@
|
||||
{"version":3,"sources":["webpack:///../src/resources/load_markdown.js"],"names":["__webpack_require__","r","__webpack_exports__","d","marked","filterXSS","marked__WEBPACK_IMPORTED_MODULE_0__","marked__WEBPACK_IMPORTED_MODULE_0___default","n","xss__WEBPACK_IMPORTED_MODULE_1__","xss__WEBPACK_IMPORTED_MODULE_1___default","marked_","filterXSS_"],"mappings":"0FAAAA,EAAAC,EAAAC,GAAAF,EAAAG,EAAAD,EAAA,2BAAAE,IAAAJ,EAAAG,EAAAD,EAAA,8BAAAG,IAAA,IAAAC,EAAAN,EAAA,IAAAO,EAAAP,EAAAQ,EAAAF,GAAAG,EAAAT,EAAA,IAAAU,EAAAV,EAAAQ,EAAAC,GAGaL,EAASO,IACTN,EAAYO","file":"chunk.9e3883f96f68b3ce89f5.js","sourcesContent":["import marked_ from \"marked\";\nimport filterXSS_ from \"xss\";\n\nexport const marked = marked_;\nexport const filterXSS = filterXSS_;\n"],"sourceRoot":""}
|
File diff suppressed because one or more lines are too long
@@ -1,820 +0,0 @@
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2014 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
* @license
|
||||
* Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
* This code may only be used under the BSD style license found at
|
||||
* http://polymer.github.io/LICENSE.txt
|
||||
* The complete set of authors may be found at
|
||||
* http://polymer.github.io/AUTHORS.txt
|
||||
* The complete set of contributors may be found at
|
||||
* http://polymer.github.io/CONTRIBUTORS.txt
|
||||
* Code distributed by Google as part of the polymer project is also
|
||||
* subject to an additional IP rights grant found at
|
||||
* http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2016 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -1,471 +0,0 @@
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2015 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at
|
||||
http://polymer.github.io/LICENSE.txt The complete set of authors may be found at
|
||||
http://polymer.github.io/AUTHORS.txt The complete set of contributors may be
|
||||
found at http://polymer.github.io/CONTRIBUTORS.txt Code distributed by Google as
|
||||
part of the polymer project is also subject to an additional IP rights grant
|
||||
found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
||||
|
||||
/**
|
||||
@license
|
||||
Copyright (c) 2017 The Polymer Project Authors. All rights reserved.
|
||||
This code may only be used under the BSD style license found at http://polymer.github.io/LICENSE.txt
|
||||
The complete set of authors may be found at http://polymer.github.io/AUTHORS.txt
|
||||
The complete set of contributors may be found at http://polymer.github.io/CONTRIBUTORS.txt
|
||||
Code distributed by Google as part of the polymer project is also
|
||||
subject to an additional IP rights grant found at http://polymer.github.io/PATENTS.txt
|
||||
*/
|
Binary file not shown.
File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
|
||||
!function(e){function n(n){for(var t,o,i=n[0],u=n[1],c=0,f=[];c<i.length;c++)o=i[c],r[o]&&f.push(r[o][0]),r[o]=0;for(t in u)Object.prototype.hasOwnProperty.call(u,t)&&(e[t]=u[t]);for(a&&a(n);f.length;)f.shift()()}var t={},r={1:0};function o(n){if(t[n])return t[n].exports;var r=t[n]={i:n,l:!1,exports:{}};return e[n].call(r.exports,r,r.exports,o),r.l=!0,r.exports}o.e=function(e){var n=[],t=r[e];if(0!==t)if(t)n.push(t[2]);else{var i=new Promise(function(n,o){t=r[e]=[n,o]});n.push(t[2]=i);var u,c=document.getElementsByTagName("head")[0],a=document.createElement("script");a.charset="utf-8",a.timeout=120,o.nc&&a.setAttribute("nonce",o.nc),a.src=function(e){return o.p+"chunk."+{0:"f32f3c841cc3e1d081f7",2:"8c049a124b9397e54c16",3:"d0eb7b86b775838caf5e",4:"9e3883f96f68b3ce89f5",5:"0cb8b788b03dcc48da14",6:"c1ac97370d72bce0a835",7:"0853908528652fbc5d4f"}[e]+".js"}(e),u=function(n){a.onerror=a.onload=null,clearTimeout(f);var t=r[e];if(0!==t){if(t){var o=n&&("load"===n.type?"missing":n.type),i=n&&n.target&&n.target.src,u=new Error("Loading chunk "+e+" failed.\n("+o+": "+i+")");u.type=o,u.request=i,t[1](u)}r[e]=void 0}};var f=setTimeout(function(){u({type:"timeout",target:a})},12e4);a.onerror=a.onload=u,c.appendChild(a)}return Promise.all(n)},o.m=e,o.c=t,o.d=function(e,n,t){o.o(e,n)||Object.defineProperty(e,n,{enumerable:!0,get:t})},o.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},o.t=function(e,n){if(1&n&&(e=o(e)),8&n)return e;if(4&n&&"object"==typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(o.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&n&&"string"!=typeof e)for(var r in e)o.d(t,r,function(n){return e[n]}.bind(null,r));return t},o.n=function(e){var n=e&&e.__esModule?function(){return e.default}:function(){return e};return o.d(n,"a",n),n},o.o=function(e,n){return Object.prototype.hasOwnProperty.call(e,n)},o.p="/api/hassio/app/",o.oe=function(e){throw console.error(e),e};var i=window.webpackJsonp=window.webpackJsonp||[],u=i.push.bind(i);i.push=n,i=i.slice();for(var c=0;c<i.length;c++)n(i[c]);var a=u;o(o.s=0)}([function(e,n,t){window.loadES5Adapter().then(function(){Promise.all([t.e(0),t.e(2)]).then(t.bind(null,2)),Promise.all([t.e(0),t.e(6),t.e(3)]).then(t.bind(null,1))})}]);
|
||||
//# sourceMappingURL=entrypoint.js.map
|
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
File diff suppressed because one or more lines are too long
Binary file not shown.
@@ -1,38 +0,0 @@
|
||||
<!doctype html>
|
||||
<html>
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Hass.io</title>
|
||||
<meta name='viewport' content='width=device-width, user-scalable=no'>
|
||||
<style>
|
||||
body {
|
||||
height: 100vh;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
</style>
|
||||
<script src='/frontend_es5/custom-elements-es5-adapter.js'></script>
|
||||
</head>
|
||||
<body>
|
||||
<hassio-app></hassio-app>
|
||||
<script>
|
||||
function addScript(src) {
|
||||
var e = document.createElement('script');
|
||||
e.src = src;
|
||||
document.write(e.outerHTML);
|
||||
}
|
||||
var webComponentsSupported = (
|
||||
'customElements' in window &&
|
||||
'import' in document.createElement('link') &&
|
||||
'content' in document.createElement('template'));
|
||||
if (!webComponentsSupported) {
|
||||
addScript('/static/webcomponents-lite.js');
|
||||
}
|
||||
</script>
|
||||
<!--
|
||||
Disabled while we make Home Assistant able to serve the right files.
|
||||
<script src="./app.js"></script>
|
||||
-->
|
||||
<link rel='import' href='./hassio-app.html'>
|
||||
</body>
|
||||
</html>
|
Binary file not shown.
@@ -1,138 +0,0 @@
|
||||
"""Init file for Hass.io Supervisor RESTful API."""
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from .utils import api_process, api_process_raw, api_validate
|
||||
from ..const import (
|
||||
ATTR_ADDONS, ATTR_VERSION, ATTR_LAST_VERSION, ATTR_CHANNEL, ATTR_ARCH,
|
||||
HASSIO_VERSION, ATTR_ADDONS_REPOSITORIES, ATTR_LOGO, ATTR_REPOSITORY,
|
||||
ATTR_DESCRIPTON, ATTR_NAME, ATTR_SLUG, ATTR_INSTALLED, ATTR_TIMEZONE,
|
||||
ATTR_STATE, ATTR_WAIT_BOOT, ATTR_CPU_PERCENT, ATTR_MEMORY_USAGE,
|
||||
ATTR_MEMORY_LIMIT, ATTR_NETWORK_RX, ATTR_NETWORK_TX, ATTR_BLK_READ,
|
||||
ATTR_BLK_WRITE, CONTENT_TYPE_BINARY, ATTR_ICON)
|
||||
from ..coresys import CoreSysAttributes
|
||||
from ..validate import WAIT_BOOT, REPOSITORIES, CHANNELS
|
||||
from ..exceptions import APIError
|
||||
from ..utils.validate import validate_timezone
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SCHEMA_OPTIONS = vol.Schema({
|
||||
vol.Optional(ATTR_CHANNEL): CHANNELS,
|
||||
vol.Optional(ATTR_ADDONS_REPOSITORIES): REPOSITORIES,
|
||||
vol.Optional(ATTR_TIMEZONE): validate_timezone,
|
||||
vol.Optional(ATTR_WAIT_BOOT): WAIT_BOOT,
|
||||
})
|
||||
|
||||
SCHEMA_VERSION = vol.Schema({
|
||||
vol.Optional(ATTR_VERSION): vol.Coerce(str),
|
||||
})
|
||||
|
||||
|
||||
class APISupervisor(CoreSysAttributes):
|
||||
"""Handle RESTful API for Supervisor functions."""
|
||||
|
||||
@api_process
|
||||
async def ping(self, request):
|
||||
"""Return ok for signal that the API is ready."""
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def info(self, request):
|
||||
"""Return host information."""
|
||||
list_addons = []
|
||||
for addon in self.sys_addons.list_addons:
|
||||
if addon.is_installed:
|
||||
list_addons.append({
|
||||
ATTR_NAME: addon.name,
|
||||
ATTR_SLUG: addon.slug,
|
||||
ATTR_DESCRIPTON: addon.description,
|
||||
ATTR_STATE: await addon.state(),
|
||||
ATTR_VERSION: addon.last_version,
|
||||
ATTR_INSTALLED: addon.version_installed,
|
||||
ATTR_REPOSITORY: addon.repository,
|
||||
ATTR_ICON: addon.with_icon,
|
||||
ATTR_LOGO: addon.with_logo,
|
||||
})
|
||||
|
||||
return {
|
||||
ATTR_VERSION: HASSIO_VERSION,
|
||||
ATTR_LAST_VERSION: self.sys_updater.version_hassio,
|
||||
ATTR_CHANNEL: self.sys_updater.channel,
|
||||
ATTR_ARCH: self.sys_supervisor.arch,
|
||||
ATTR_WAIT_BOOT: self.sys_config.wait_boot,
|
||||
ATTR_TIMEZONE: self.sys_config.timezone,
|
||||
ATTR_ADDONS: list_addons,
|
||||
ATTR_ADDONS_REPOSITORIES: self.sys_config.addons_repositories,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def options(self, request):
|
||||
"""Set Supervisor options."""
|
||||
body = await api_validate(SCHEMA_OPTIONS, request)
|
||||
|
||||
if ATTR_CHANNEL in body:
|
||||
self.sys_updater.channel = body[ATTR_CHANNEL]
|
||||
|
||||
if ATTR_TIMEZONE in body:
|
||||
self.sys_config.timezone = body[ATTR_TIMEZONE]
|
||||
|
||||
if ATTR_WAIT_BOOT in body:
|
||||
self.sys_config.wait_boot = body[ATTR_WAIT_BOOT]
|
||||
|
||||
if ATTR_ADDONS_REPOSITORIES in body:
|
||||
new = set(body[ATTR_ADDONS_REPOSITORIES])
|
||||
await asyncio.shield(self.sys_addons.load_repositories(new))
|
||||
|
||||
self.sys_updater.save_data()
|
||||
self.sys_config.save_data()
|
||||
return True
|
||||
|
||||
@api_process
|
||||
async def stats(self, request):
|
||||
"""Return resource information."""
|
||||
stats = await self.sys_supervisor.stats()
|
||||
if not stats:
|
||||
raise APIError("No stats available")
|
||||
|
||||
return {
|
||||
ATTR_CPU_PERCENT: stats.cpu_percent,
|
||||
ATTR_MEMORY_USAGE: stats.memory_usage,
|
||||
ATTR_MEMORY_LIMIT: stats.memory_limit,
|
||||
ATTR_NETWORK_RX: stats.network_rx,
|
||||
ATTR_NETWORK_TX: stats.network_tx,
|
||||
ATTR_BLK_READ: stats.blk_read,
|
||||
ATTR_BLK_WRITE: stats.blk_write,
|
||||
}
|
||||
|
||||
@api_process
|
||||
async def update(self, request):
|
||||
"""Update Supervisor OS."""
|
||||
body = await api_validate(SCHEMA_VERSION, request)
|
||||
version = body.get(ATTR_VERSION, self.sys_updater.version_hassio)
|
||||
|
||||
if version == self.sys_supervisor.version:
|
||||
raise APIError("Version {} is already in use".format(version))
|
||||
|
||||
return await asyncio.shield(self.sys_supervisor.update(version))
|
||||
|
||||
@api_process
|
||||
async def reload(self, request):
|
||||
"""Reload add-ons, configuration, etc."""
|
||||
tasks = [
|
||||
self.sys_updater.reload(),
|
||||
]
|
||||
results, _ = await asyncio.shield(asyncio.wait(tasks))
|
||||
|
||||
for result in results:
|
||||
if result.exception() is not None:
|
||||
raise APIError("Some reload task fails!")
|
||||
|
||||
return True
|
||||
|
||||
@api_process_raw(CONTENT_TYPE_BINARY)
|
||||
def logs(self, request):
|
||||
"""Return supervisor Docker logs."""
|
||||
return self.sys_supervisor.logs()
|
@@ -1,95 +0,0 @@
"""Init file for Hass.io util for RESTful API."""
import json
import logging

from aiohttp import web
import voluptuous as vol
from voluptuous.humanize import humanize_error

from ..const import (
    JSON_RESULT, JSON_DATA, JSON_MESSAGE, RESULT_OK, RESULT_ERROR,
    CONTENT_TYPE_BINARY)
from ..exceptions import HassioError, APIError, APIForbidden

_LOGGER = logging.getLogger(__name__)


def json_loads(data):
    """Extract JSON from a string, with support for '' and None."""
    if not data:
        return {}
    try:
        return json.loads(data)
    except json.JSONDecodeError:
        raise APIError("Invalid JSON")


def api_process(method):
    """Wrap function with true/false calls to rest api."""
    async def wrap_api(api, *args, **kwargs):
        """Return API information."""
        try:
            answer = await method(api, *args, **kwargs)
        except (APIError, APIForbidden) as err:
            return api_return_error(message=str(err))
        except HassioError:
            return api_return_error(message="Unknown error, see logs")

        if isinstance(answer, dict):
            return api_return_ok(data=answer)
        if isinstance(answer, web.Response):
            return answer
        if isinstance(answer, bool) and not answer:
            return api_return_error()
        return api_return_ok()

    return wrap_api


def api_process_raw(content):
    """Wrap content_type into function."""
    def wrap_method(method):
        """Wrap function with raw output to rest api."""
        async def wrap_api(api, *args, **kwargs):
            """Return api information."""
            try:
                msg_data = await method(api, *args, **kwargs)
                msg_type = content
            except (APIError, APIForbidden) as err:
                msg_data = str(err).encode()
                msg_type = CONTENT_TYPE_BINARY
            except HassioError:
                msg_data = b''
                msg_type = CONTENT_TYPE_BINARY

            return web.Response(body=msg_data, content_type=msg_type)

        return wrap_api
    return wrap_method


def api_return_error(message=None):
    """Return an API error message."""
    return web.json_response({
        JSON_RESULT: RESULT_ERROR,
        JSON_MESSAGE: message,
    }, status=400)


def api_return_ok(data=None):
    """Return an API ok answer."""
    return web.json_response({
        JSON_RESULT: RESULT_OK,
        JSON_DATA: data or {},
    })


async def api_validate(schema, request):
    """Validate request data with schema."""
    data = await request.json(loads=json_loads)
    try:
        data = schema(data)
    except vol.Invalid as ex:
        raise APIError(humanize_error(data, ex)) from None

    return data
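
A short sketch of the contract api_process enforces on a handler; the APIExample class is hypothetical:

class APIExample:
    """Hypothetical view illustrating the decorator contract."""

    @api_process
    async def ping(self, request):
        # A dict return becomes {"result": "ok", "data": {...}};
        # True or None becomes a bare {"result": "ok"};
        # raising APIError yields {"result": "error", ...} with HTTP 400.
        return {'pong': True}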
@@ -1,49 +0,0 @@
{
    "raspberrypi": [
        "armhf"
    ],
    "raspberrypi2": [
        "armv7",
        "armhf"
    ],
    "raspberrypi3": [
        "armv7",
        "armhf"
    ],
    "raspberrypi3-64": [
        "aarch64",
        "armv7",
        "armhf"
    ],
    "tinker": [
        "armv7",
        "armhf"
    ],
    "odroid-c2": [
        "aarch64"
    ],
    "odroid-xu": [
        "armv7",
        "armhf"
    ],
    "orangepi-prime": [
        "aarch64"
    ],
    "qemux86": [
        "i386"
    ],
    "qemux86-64": [
        "amd64",
        "i386"
    ],
    "qemuarm": [
        "armhf"
    ],
    "qemuarm-64": [
        "aarch64"
    ],
    "intel-nuc": [
        "amd64",
        "i386"
    ]
}
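
A sketch of how this machine-to-architecture map can be consumed; the file name machine.json is an assumption, since the diff does not show where the JSON lives:

import json
from pathlib import Path

MACHINE_MAP = json.loads(Path('machine.json').read_text())


def arches_for(machine):
    """Return the supported arch list for a machine, best match first."""
    return MACHINE_MAP.get(machine, [])

assert arches_for('raspberrypi3-64') == ['aarch64', 'armv7', 'armhf']
assert arches_for('unknown-board') == []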
@@ -1,95 +0,0 @@
"""Manage SSO for Add-ons with Home Assistant user."""
import logging
import hashlib

from .const import (
    FILE_HASSIO_AUTH, ATTR_PASSWORD, ATTR_USERNAME, ATTR_ADDON)
from .coresys import CoreSysAttributes
from .utils.json import JsonConfig
from .validate import SCHEMA_AUTH_CONFIG
from .exceptions import AuthError, HomeAssistantAPIError

_LOGGER = logging.getLogger(__name__)


class Auth(JsonConfig, CoreSysAttributes):
    """Manage SSO for Add-ons with Home Assistant user."""

    def __init__(self, coresys):
        """Initialize the authentication manager."""
        super().__init__(FILE_HASSIO_AUTH, SCHEMA_AUTH_CONFIG)
        self.coresys = coresys

    def _check_cache(self, username, password):
        """Check password in cache."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) == password_h:
            _LOGGER.info("Cache hit for %s", username)
            return True

        _LOGGER.warning("No cache hit for %s", username)
        return False

    def _update_cache(self, username, password):
        """Cache a username/password pair."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) == password_h:
            return

        self._data[username_h] = password_h
        self.save_data()

    def _dismatch_cache(self, username, password):
        """Remove user from cache."""
        username_h = _rehash(username)
        password_h = _rehash(password, username)

        if self._data.get(username_h) != password_h:
            return

        self._data.pop(username_h, None)
        self.save_data()

    async def check_login(self, addon, username, password):
        """Check username login."""
        if password is None:
            _LOGGER.error("None as password is not supported!")
            raise AuthError()
        _LOGGER.info("Auth request from %s for %s", addon.slug, username)

        # Check API state
        if not await self.sys_homeassistant.check_api_state():
            _LOGGER.info("Home Assistant not running, checking cache")
            return self._check_cache(username, password)

        try:
            async with self.sys_homeassistant.make_request(
                    'post', 'api/hassio_auth', json={
                        ATTR_USERNAME: username,
                        ATTR_PASSWORD: password,
                        ATTR_ADDON: addon.slug,
                    }) as req:

                if req.status == 200:
                    _LOGGER.info("Successful login from %s", username)
                    self._update_cache(username, password)
                    return True

                _LOGGER.warning("Wrong login from %s", username)
                self._dismatch_cache(username, password)
                return False
        except HomeAssistantAPIError:
            _LOGGER.error("Can't request auth on Home Assistant!")

        raise AuthError()


def _rehash(value, salt2=""):
    """Rehash a value with an iterated SHA-256."""
    for idx in range(1, 20):
        value = hashlib.sha256(f"{value}{idx}{salt2}".encode()).hexdigest()
    return value
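
A minimal sketch of what _rehash guarantees for the cache above; it only assumes the function as defined in this file:

# The cache never stores plaintext: both key and value are the result of
# 19 chained SHA-256 rounds, with the username doubling as a salt for
# the password hash.
username_h = _rehash('alice')
password_h = _rehash('secret', 'alice')

assert len(username_h) == 64                        # SHA-256 hex digest
assert _rehash('secret', 'alice') == password_h     # deterministic
assert _rehash('secret', 'bob') != password_h       # salted per user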
@@ -1,206 +0,0 @@
"""Bootstrap Hass.io."""
import logging
import os
from pathlib import Path
import shutil
import signal

from colorlog import ColoredFormatter

from .addons import AddonManager
from .api import RestAPI
from .arch import CpuArch
from .auth import Auth
from .const import SOCKET_DOCKER
from .core import HassIO
from .coresys import CoreSys
from .dbus import DBusManager
from .discovery import Discovery
from .hassos import HassOS
from .homeassistant import HomeAssistant
from .host import HostManager
from .services import ServiceManager
from .snapshots import SnapshotManager
from .supervisor import Supervisor
from .tasks import Tasks
from .updater import Updater

_LOGGER = logging.getLogger(__name__)

ENV_SHARE = "SUPERVISOR_SHARE"
ENV_NAME = "SUPERVISOR_NAME"
ENV_REPO = "HOMEASSISTANT_REPOSITORY"

MACHINE_ID = Path("/etc/machine-id")


async def initialize_coresys():
    """Initialize HassIO coresys/objects."""
    coresys = CoreSys()

    # Initialize core objects
    coresys.core = HassIO(coresys)
    coresys.arch = CpuArch(coresys)
    coresys.auth = Auth(coresys)
    coresys.updater = Updater(coresys)
    coresys.api = RestAPI(coresys)
    coresys.supervisor = Supervisor(coresys)
    coresys.homeassistant = HomeAssistant(coresys)
    coresys.addons = AddonManager(coresys)
    coresys.snapshots = SnapshotManager(coresys)
    coresys.host = HostManager(coresys)
    coresys.tasks = Tasks(coresys)
    coresys.services = ServiceManager(coresys)
    coresys.discovery = Discovery(coresys)
    coresys.dbus = DBusManager(coresys)
    coresys.hassos = HassOS(coresys)

    # bootstrap config
    initialize_system_data(coresys)

    # Set Machine/Host ID
    if MACHINE_ID.exists():
        coresys.machine_id = MACHINE_ID.read_text().strip()

    return coresys


def initialize_system_data(coresys):
    """Set up the default configuration and create folders."""
    config = coresys.config

    # Home Assistant configuration folder
    if not config.path_homeassistant.is_dir():
        _LOGGER.info("Create Home Assistant configuration folder %s",
                     config.path_homeassistant)
        config.path_homeassistant.mkdir()

    # hassio ssl folder
    if not config.path_ssl.is_dir():
        _LOGGER.info("Create Hass.io SSL/TLS folder %s", config.path_ssl)
        config.path_ssl.mkdir()

    # hassio addon data folder
    if not config.path_addons_data.is_dir():
        _LOGGER.info("Create Hass.io Add-on data folder %s",
                     config.path_addons_data)
        config.path_addons_data.mkdir(parents=True)

    if not config.path_addons_local.is_dir():
        _LOGGER.info("Create Hass.io Add-on local repository folder %s",
                     config.path_addons_local)
        config.path_addons_local.mkdir(parents=True)

    if not config.path_addons_git.is_dir():
        _LOGGER.info("Create Hass.io Add-on git repositories folder %s",
                     config.path_addons_git)
        config.path_addons_git.mkdir(parents=True)

    # hassio tmp folder
    if not config.path_tmp.is_dir():
        _LOGGER.info("Create Hass.io temp folder %s", config.path_tmp)
        config.path_tmp.mkdir(parents=True)

    # hassio backup folder
    if not config.path_backup.is_dir():
        _LOGGER.info("Create Hass.io backup folder %s", config.path_backup)
        config.path_backup.mkdir()

    # share folder
    if not config.path_share.is_dir():
        _LOGGER.info("Create Hass.io share folder %s", config.path_share)
        config.path_share.mkdir()

    # apparmor folder
    if not config.path_apparmor.is_dir():
        _LOGGER.info("Create Hass.io Apparmor folder %s", config.path_apparmor)
        config.path_apparmor.mkdir()

    return config


def migrate_system_env(coresys):
    """Clean up some stuff after an update."""
    config = coresys.config

    # hass.io 0.37 -> 0.38
    old_build = Path(config.path_hassio, "addons/build")
    if old_build.is_dir():
        try:
            old_build.rmdir()
        except OSError:
            _LOGGER.warning("Can't cleanup old Add-on build directory")


def initialize_logging():
    """Set up the logging."""
    logging.basicConfig(level=logging.INFO)
    fmt = "%(asctime)s %(levelname)s (%(threadName)s) [%(name)s] %(message)s"
    colorfmt = f"%(log_color)s{fmt}%(reset)s"
    datefmt = "%y-%m-%d %H:%M:%S"

    # suppress overly verbose logs from libraries that aren't helpful
    logging.getLogger("aiohttp.access").setLevel(logging.WARNING)

    logging.getLogger().handlers[0].setFormatter(
        ColoredFormatter(
            colorfmt,
            datefmt=datefmt,
            reset=True,
            log_colors={
                "DEBUG": "cyan",
                "INFO": "green",
                "WARNING": "yellow",
                "ERROR": "red",
                "CRITICAL": "red",
            },
        ))


def check_environment():
    """Check that the required environment is in place."""
    # check environment variables
    for key in (ENV_SHARE, ENV_NAME, ENV_REPO):
        try:
            os.environ[key]
        except KeyError:
            _LOGGER.fatal("Can't find %s in env!", key)
            return False

    # check docker socket
    if not SOCKET_DOCKER.is_socket():
        _LOGGER.fatal("Can't find Docker socket!")
        return False

    # check socat exec
    if not shutil.which("socat"):
        _LOGGER.fatal("Can't find socat!")
        return False

    # check gdbus exec
    if not shutil.which("gdbus"):
        _LOGGER.fatal("Can't find gdbus!")
        return False

    return True


def reg_signal(loop):
    """Register SIGTERM, SIGHUP and SIGINT handlers to stop the system."""
    try:
        loop.add_signal_handler(signal.SIGTERM,
                                lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGTERM")

    try:
        loop.add_signal_handler(signal.SIGHUP,
                                lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGHUP")

    try:
        loop.add_signal_handler(signal.SIGINT,
                                lambda: loop.call_soon(loop.stop))
    except (ValueError, RuntimeError):
        _LOGGER.warning("Could not bind to SIGINT")
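
A hypothetical entry point showing the intended call order of these helpers; the real __main__ wiring is not part of this diff, so treat this as a sketch only:

import asyncio


async def main():
    """Sketch: bootstrap Hass.io using the helpers defined above."""
    initialize_logging()
    if not check_environment():
        return

    coresys = await initialize_coresys()
    migrate_system_env(coresys)
    reg_signal(coresys.loop)

    await coresys.core.setup()
    await coresys.core.start()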
hassio/config.py
@@ -1,206 +0,0 @@
"""Bootstrap Hass.io."""
from datetime import datetime
import logging
import os
import re
from pathlib import Path, PurePath

import pytz

from .const import (
    FILE_HASSIO_CONFIG, HASSIO_DATA, ATTR_TIMEZONE, ATTR_ADDONS_CUSTOM_LIST,
    ATTR_LAST_BOOT, ATTR_WAIT_BOOT)
from .utils.dt import parse_datetime
from .utils.json import JsonConfig
from .validate import SCHEMA_HASSIO_CONFIG

_LOGGER = logging.getLogger(__name__)

HOMEASSISTANT_CONFIG = PurePath('homeassistant')

HASSIO_SSL = PurePath("ssl")

ADDONS_CORE = PurePath("addons/core")
ADDONS_LOCAL = PurePath("addons/local")
ADDONS_GIT = PurePath("addons/git")
ADDONS_DATA = PurePath("addons/data")

BACKUP_DATA = PurePath("backup")
SHARE_DATA = PurePath("share")
TMP_DATA = PurePath("tmp")
APPARMOR_DATA = PurePath("apparmor")

DEFAULT_BOOT_TIME = datetime.utcfromtimestamp(0).isoformat()

RE_TIMEZONE = re.compile(r"time_zone: (?P<timezone>[\w/\-+]+)")


class CoreConfig(JsonConfig):
    """Hold all core config data."""

    def __init__(self):
        """Initialize config object."""
        super().__init__(FILE_HASSIO_CONFIG, SCHEMA_HASSIO_CONFIG)

    @property
    def timezone(self):
        """Return system timezone."""
        config_file = Path(self.path_homeassistant, 'configuration.yaml')
        try:
            assert config_file.exists()
            configuration = config_file.read_text()

            data = RE_TIMEZONE.search(configuration)
            assert data

            timezone = data.group('timezone')
            pytz.timezone(timezone)
        except (pytz.exceptions.UnknownTimeZoneError, OSError, AssertionError):
            _LOGGER.debug("Can't parse Home Assistant timezone")
            return self._data[ATTR_TIMEZONE]

        return timezone

    @timezone.setter
    def timezone(self, value):
        """Set system timezone."""
        self._data[ATTR_TIMEZONE] = value

    @property
    def wait_boot(self):
        """Return wait time for auto boot stages."""
        return self._data[ATTR_WAIT_BOOT]

    @wait_boot.setter
    def wait_boot(self, value):
        """Set wait boot time."""
        self._data[ATTR_WAIT_BOOT] = value

    @property
    def last_boot(self):
        """Return last boot datetime."""
        boot_str = self._data.get(ATTR_LAST_BOOT, DEFAULT_BOOT_TIME)

        boot_time = parse_datetime(boot_str)
        if not boot_time:
            return datetime.utcfromtimestamp(1)
        return boot_time

    @last_boot.setter
    def last_boot(self, value):
        """Set last boot datetime."""
        self._data[ATTR_LAST_BOOT] = value.isoformat()

    @property
    def path_hassio(self):
        """Return Hass.io data path."""
        return HASSIO_DATA

    @property
    def path_extern_hassio(self):
        """Return Hass.io data path external for Docker."""
        return PurePath(os.environ['SUPERVISOR_SHARE'])

    @property
    def path_extern_homeassistant(self):
        """Return config path external for Docker."""
        return str(PurePath(self.path_extern_hassio, HOMEASSISTANT_CONFIG))

    @property
    def path_homeassistant(self):
        """Return config path inside supervisor."""
        return Path(HASSIO_DATA, HOMEASSISTANT_CONFIG)

    @property
    def path_extern_ssl(self):
        """Return SSL path external for Docker."""
        return str(PurePath(self.path_extern_hassio, HASSIO_SSL))

    @property
    def path_ssl(self):
        """Return SSL path inside supervisor."""
        return Path(HASSIO_DATA, HASSIO_SSL)

    @property
    def path_addons_core(self):
        """Return git path for core Add-ons."""
        return Path(HASSIO_DATA, ADDONS_CORE)

    @property
    def path_addons_git(self):
        """Return path for Git Add-ons."""
        return Path(HASSIO_DATA, ADDONS_GIT)

    @property
    def path_addons_local(self):
        """Return path for custom Add-ons."""
        return Path(HASSIO_DATA, ADDONS_LOCAL)

    @property
    def path_extern_addons_local(self):
        """Return path for custom Add-ons external for Docker."""
        return PurePath(self.path_extern_hassio, ADDONS_LOCAL)

    @property
    def path_addons_data(self):
        """Return root Add-on data folder."""
        return Path(HASSIO_DATA, ADDONS_DATA)

    @property
    def path_extern_addons_data(self):
        """Return root add-on data folder external for Docker."""
        return PurePath(self.path_extern_hassio, ADDONS_DATA)

    @property
    def path_tmp(self):
        """Return Hass.io temp folder."""
        return Path(HASSIO_DATA, TMP_DATA)

    @property
    def path_extern_tmp(self):
        """Return Hass.io temp folder for Docker."""
        return PurePath(self.path_extern_hassio, TMP_DATA)

    @property
    def path_backup(self):
        """Return root backup data folder."""
        return Path(HASSIO_DATA, BACKUP_DATA)

    @property
    def path_extern_backup(self):
        """Return root backup data folder external for Docker."""
        return PurePath(self.path_extern_hassio, BACKUP_DATA)

    @property
    def path_share(self):
        """Return root share data folder."""
        return Path(HASSIO_DATA, SHARE_DATA)

    @property
    def path_apparmor(self):
        """Return root Apparmor profile folder."""
        return Path(HASSIO_DATA, APPARMOR_DATA)

    @property
    def path_extern_share(self):
        """Return root share data folder external for Docker."""
        return PurePath(self.path_extern_hassio, SHARE_DATA)

    @property
    def addons_repositories(self):
        """Return list of custom Add-on repositories."""
        return self._data[ATTR_ADDONS_CUSTOM_LIST]

    def add_addon_repository(self, repo):
        """Add a custom repository to the list."""
        if repo in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].append(repo)

    def drop_addon_repository(self, repo):
        """Remove a custom repository from the list."""
        if repo not in self._data[ATTR_ADDONS_CUSTOM_LIST]:
            return

        self._data[ATTR_ADDONS_CUSTOM_LIST].remove(repo)
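
A quick self-contained check of the timezone regex defined above:

sample = """
homeassistant:
  time_zone: Europe/Amsterdam
"""

match = RE_TIMEZONE.search(sample)
assert match is not None
assert match.group('timezone') == 'Europe/Amsterdam'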
hassio/core.py
@@ -1,145 +0,0 @@
"""Main file for Hass.io."""
from contextlib import suppress
import asyncio
import logging

import async_timeout

from .coresys import CoreSysAttributes
from .const import (STARTUP_SYSTEM, STARTUP_SERVICES, STARTUP_APPLICATION,
                    STARTUP_INITIALIZE)
from .exceptions import HassioError, HomeAssistantError

_LOGGER = logging.getLogger(__name__)


class HassIO(CoreSysAttributes):
    """Main object of Hass.io."""

    def __init__(self, coresys):
        """Initialize Hass.io object."""
        self.coresys = coresys

    async def setup(self):
        """Set up HassIO orchestration."""
        # Load Supervisor
        await self.sys_supervisor.load()

        # Load DBus
        await self.sys_dbus.load()

        # Load Host
        await self.sys_host.load()

        # Load Home Assistant
        await self.sys_homeassistant.load()

        # Load CPU/Arch
        await self.sys_arch.load()

        # Load HassOS
        await self.sys_hassos.load()

        # Load Add-ons
        await self.sys_addons.load()

        # rest api views
        await self.sys_api.load()

        # load last available updater data
        await self.sys_updater.load()

        # load last available snapshot data
        await self.sys_snapshots.load()

        # load services
        await self.sys_services.load()

        # Load discovery
        await self.sys_discovery.load()

        # start dns forwarding
        self.sys_create_task(self.sys_dns.start())

    async def start(self):
        """Start Hass.io orchestration."""
        # on release channel, try to update itself
        if self.sys_supervisor.need_update:
            if self.sys_dev:
                _LOGGER.warning("Ignore Hass.io updates on dev!")
            elif await self.sys_supervisor.update():
                return

        # start api
        await self.sys_api.start()

        # start add-ons marked as initialize
        await self.sys_addons.boot(STARTUP_INITIALIZE)

        try:
            # Home Assistant is already running; only the supervisor rebooted
            if self.sys_hardware.last_boot == self.sys_config.last_boot:
                _LOGGER.info("Hass.io reboot detected")
                return

            # reset registered services / discovery
            self.sys_services.reset()

            # start add-ons marked as system
            await self.sys_addons.boot(STARTUP_SYSTEM)

            # start add-ons marked as services
            await self.sys_addons.boot(STARTUP_SERVICES)

            # run Home Assistant
            if self.sys_homeassistant.boot:
                with suppress(HomeAssistantError):
                    await self.sys_homeassistant.start()

            # start add-ons marked as application
            await self.sys_addons.boot(STARTUP_APPLICATION)

            # store new last boot
            self.sys_config.last_boot = self.sys_hardware.last_boot
            self.sys_config.save_data()

        finally:
            # Add core tasks into scheduler
            await self.sys_tasks.load()

            # If landingpage, run the upgrade in the background
            if self.sys_homeassistant.version == 'landingpage':
                self.sys_create_task(self.sys_homeassistant.install())

            _LOGGER.info("Hass.io is up and running")

    async def stop(self):
        """Stop a running orchestration."""
        # don't process scheduler anymore
        self.sys_scheduler.suspend = True

        # process async stop tasks
        try:
            with async_timeout.timeout(10):
                await asyncio.wait([
                    self.sys_api.stop(),
                    self.sys_dns.stop(),
                    self.sys_websession.close(),
                    self.sys_websession_ssl.close()
                ])
        except asyncio.TimeoutError:
            _LOGGER.warning("Force Shutdown!")

        _LOGGER.info("Hass.io is down")

    async def shutdown(self):
        """Shutdown all running containers in the correct order."""
        await self.sys_addons.shutdown(STARTUP_APPLICATION)

        # Close Home Assistant
        with suppress(HassioError):
            await self.sys_homeassistant.stop()

        await self.sys_addons.shutdown(STARTUP_SERVICES)
        await self.sys_addons.shutdown(STARTUP_SYSTEM)
        await self.sys_addons.shutdown(STARTUP_INITIALIZE)
@@ -1,39 +0,0 @@
"""D-Bus interface objects."""

from .systemd import Systemd
from .hostname import Hostname
from .rauc import Rauc
from ..coresys import CoreSysAttributes


class DBusManager(CoreSysAttributes):
    """A DBus Interface handler."""

    def __init__(self, coresys):
        """Initialize D-Bus interface."""
        self.coresys = coresys

        self._systemd = Systemd()
        self._hostname = Hostname()
        self._rauc = Rauc()

    @property
    def systemd(self):
        """Return the systemd interface."""
        return self._systemd

    @property
    def hostname(self):
        """Return the hostname interface."""
        return self._hostname

    @property
    def rauc(self):
        """Return the rauc interface."""
        return self._rauc

    async def load(self):
        """Connect interfaces to D-Bus."""
        await self.systemd.connect()
        await self.hostname.connect()
        await self.rauc.connect()
@@ -1,39 +0,0 @@
"""D-Bus interface for hostname."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'org.freedesktop.hostname1'
DBUS_OBJECT = '/org/freedesktop/hostname1'


class Hostname(DBusInterface):
    """Handle D-Bus interface for hostname/system."""

    async def connect(self):
        """Connect to system's D-Bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to hostname")

    @dbus_connected
    def set_static_hostname(self, hostname):
        """Change local hostname.

        Return a coroutine.
        """
        return self.dbus.SetStaticHostname(hostname, False)

    @dbus_connected
    def get_properties(self):
        """Return local host information.

        Return a coroutine.
        """
        return self.dbus.get_properties(DBUS_NAME)
@@ -1,55 +0,0 @@
"""D-Bus interface for rauc."""
import logging

from .interface import DBusInterface
from .utils import dbus_connected
from ..exceptions import DBusError
from ..utils.gdbus import DBus

_LOGGER = logging.getLogger(__name__)

DBUS_NAME = 'de.pengutronix.rauc'
DBUS_OBJECT = '/'


class Rauc(DBusInterface):
    """Handle D-Bus interface for rauc."""

    async def connect(self):
        """Connect to D-Bus."""
        try:
            self.dbus = await DBus.connect(DBUS_NAME, DBUS_OBJECT)
        except DBusError:
            _LOGGER.warning("Can't connect to rauc")

    @dbus_connected
    def install(self, raucb_file):
        """Install rauc bundle file.

        Return a coroutine.
        """
        return self.dbus.Installer.Install(raucb_file)

    @dbus_connected
    def get_slot_status(self):
        """Get slot status.

        Return a coroutine.
        """
        return self.dbus.Installer.GetSlotStatus()

    @dbus_connected
    def get_properties(self):
        """Return rauc information.

        Return a coroutine.
        """
        return self.dbus.get_properties(f"{DBUS_NAME}.Installer")

    @dbus_connected
    def signal_completed(self):
        """Return a signal wrapper for the Completed signal.

        Return a coroutine.
        """
        return self.dbus.wait_signal(f"{DBUS_NAME}.Installer.Completed")
@@ -1,122 +0,0 @@
"""Handle discovery messages for Home Assistant."""
import logging
from contextlib import suppress
from uuid import uuid4

import attr
import voluptuous as vol
from voluptuous.humanize import humanize_error

from .const import FILE_HASSIO_DISCOVERY, ATTR_CONFIG, ATTR_DISCOVERY
from .coresys import CoreSysAttributes
from .exceptions import DiscoveryError, HomeAssistantAPIError
from .validate import SCHEMA_DISCOVERY_CONFIG
from .utils.json import JsonConfig
from .services.validate import DISCOVERY_SERVICES

_LOGGER = logging.getLogger(__name__)

CMD_NEW = 'post'
CMD_DEL = 'delete'


class Discovery(CoreSysAttributes, JsonConfig):
    """Home Assistant Discovery handler."""

    def __init__(self, coresys):
        """Initialize discovery handler."""
        super().__init__(FILE_HASSIO_DISCOVERY, SCHEMA_DISCOVERY_CONFIG)
        self.coresys = coresys
        self.message_obj = {}

    async def load(self):
        """Load existing discovery messages into storage."""
        messages = {}
        for message in self._data[ATTR_DISCOVERY]:
            discovery = Message(**message)
            messages[discovery.uuid] = discovery

        _LOGGER.info("Load %d messages", len(messages))
        self.message_obj = messages

    def save(self):
        """Write discovery messages into the data file."""
        messages = []
        for message in self.list_messages:
            messages.append(attr.asdict(message))

        self._data[ATTR_DISCOVERY].clear()
        self._data[ATTR_DISCOVERY].extend(messages)
        self.save_data()

    def get(self, uuid):
        """Return discovery message."""
        return self.message_obj.get(uuid)

    @property
    def list_messages(self):
        """Return list of available discovery messages."""
        return list(self.message_obj.values())

    def send(self, addon, service, config):
        """Send a discovery message to Home Assistant."""
        try:
            config = DISCOVERY_SERVICES[service](config)
        except vol.Invalid as err:
            _LOGGER.error(
                "Invalid discovery %s config", humanize_error(config, err))
            raise DiscoveryError() from None

        # Create message
        message = Message(addon.slug, service, config)

        # Already exists?
        for old_message in self.list_messages:
            if old_message != message:
                continue
            _LOGGER.info("Duplicate discovery message from %s", addon.slug)
            return old_message

        _LOGGER.info("Send discovery to Home Assistant %s from %s",
                     service, addon.slug)
        self.message_obj[message.uuid] = message
        self.save()

        self.sys_create_task(self._push_discovery(message, CMD_NEW))
        return message

    def remove(self, message):
        """Remove a discovery message from Home Assistant."""
        self.message_obj.pop(message.uuid, None)
        self.save()

        _LOGGER.info("Delete discovery to Home Assistant %s from %s",
                     message.service, message.addon)
        self.sys_create_task(self._push_discovery(message, CMD_DEL))

    async def _push_discovery(self, message, command):
        """Send a discovery request."""
        if not await self.sys_homeassistant.check_api_state():
            _LOGGER.info("Discovery %s message ignored", message.uuid)
            return

        data = attr.asdict(message)
        data.pop(ATTR_CONFIG)

        with suppress(HomeAssistantAPIError):
            async with self.sys_homeassistant.make_request(
                    command, f"api/hassio_push/discovery/{message.uuid}",
                    json=data, timeout=10):
                _LOGGER.info("Discovery %s message sent", message.uuid)
                return

        _LOGGER.warning("Discovery %s message failed", message.uuid)


@attr.s
class Message:
    """Represent a single discovery message."""

    addon = attr.ib()
    service = attr.ib()
    config = attr.ib(cmp=False)
    uuid = attr.ib(factory=lambda: uuid4().hex, cmp=False)
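
A small sketch of why config and uuid carry cmp=False: equality only looks at addon and service, which is what lets send() detect duplicates. The values below are placeholders:

m1 = Message('local_mqtt', 'mqtt', {'host': 'a'})
m2 = Message('local_mqtt', 'mqtt', {'host': 'b'})

# Same add-on and service compare equal, regardless of config/uuid.
assert m1 == m2
assert m1.uuid != m2.uuid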
@@ -1,122 +0,0 @@
"""Init file for Hass.io Docker object."""
from contextlib import suppress
import logging

import attr
import docker

from .network import DockerNetwork
from ..const import SOCKET_DOCKER

_LOGGER = logging.getLogger(__name__)


@attr.s(frozen=True)
class CommandReturn:
    """Return object from command run."""

    exit_code = attr.ib()
    output = attr.ib()


class DockerAPI:
    """Docker Hass.io wrapper.

    This class is not AsyncIO safe!
    """

    def __init__(self):
        """Initialize Docker base wrapper."""
        self.docker = docker.DockerClient(
            base_url="unix:/{}".format(str(SOCKET_DOCKER)),
            version='auto', timeout=900)
        self.network = DockerNetwork(self.docker)

    @property
    def images(self):
        """Return API images."""
        return self.docker.images

    @property
    def containers(self):
        """Return API containers."""
        return self.docker.containers

    @property
    def api(self):
        """Return low-level API client."""
        return self.docker.api

    def run(self, image, **kwargs):
        """Create a Docker container and run it.

        Need to run inside executor.
        """
        name = kwargs.get('name', image)
        network_mode = kwargs.get('network_mode')
        hostname = kwargs.get('hostname')

        # Setup network
        kwargs['dns_search'] = ["."]
        if network_mode:
            kwargs['dns'] = [str(self.network.supervisor)]
            kwargs['dns_opt'] = ["ndots:0"]
        else:
            kwargs['network'] = None

        # Create container
        try:
            container = self.docker.containers.create(
                image, use_config_proxy=False, **kwargs)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't create container from %s: %s", name, err)
            return False

        # attach network
        if not network_mode:
            alias = [hostname] if hostname else None
            if self.network.attach_container(container, alias=alias):
                self.network.detach_default_bridge(container)
            else:
                _LOGGER.warning("Can't attach %s to hassio-net!", name)

        # run container
        try:
            container.start()
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't start %s: %s", name, err)
            return False

        return True

    def run_command(self, image, command=None, **kwargs):
        """Create a temporary container and run a command.

        Need to run inside executor.
        """
        stdout = kwargs.get('stdout', True)
        stderr = kwargs.get('stderr', True)

        _LOGGER.info("Run command '%s' on %s", command, image)
        container = None
        try:
            container = self.docker.containers.run(
                image,
                command=command,
                network=self.network.name,
                use_config_proxy=False,
                **kwargs
            )

            # wait until command is done
            result = container.wait()
            output = container.logs(stdout=stdout, stderr=stderr)

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't execute command: %s", err)
            return CommandReturn(None, b"")

        finally:
            # cleanup container; it may never have been created
            if container:
                with suppress(docker.errors.DockerException):
                    container.remove(force=True)

        return CommandReturn(result.get('StatusCode'), output)
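
A usage sketch for run_command; it assumes a reachable Docker socket and uses a placeholder image, and like the wrapper itself it must run in an executor thread, not on the event loop:

docker_api = DockerAPI()

result = docker_api.run_command('alpine:latest', command='echo hello')
if result.exit_code == 0:
    print(result.output.decode())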
@@ -1,447 +0,0 @@
"""Init file for Hass.io add-on Docker object."""
import logging
import os

import docker
import requests

from .interface import DockerInterface
from ..addons.build import AddonBuild
from ..const import (MAP_CONFIG, MAP_SSL, MAP_ADDONS, MAP_BACKUP, MAP_SHARE,
                     ENV_TOKEN, ENV_TIME, SECURITY_PROFILE, SECURITY_DISABLE)
from ..utils import process_lock

_LOGGER = logging.getLogger(__name__)

AUDIO_DEVICE = "/dev/snd:/dev/snd:rwm"


class DockerAddon(DockerInterface):
    """Docker Hass.io wrapper for add-ons."""

    def __init__(self, coresys, slug):
        """Initialize Docker add-on wrapper."""
        super().__init__(coresys)
        self._id = slug

    @property
    def addon(self):
        """Return add-on of Docker image."""
        return self.sys_addons.get(self._id)

    @property
    def image(self):
        """Return name of Docker image."""
        return self.addon.image

    @property
    def timeout(self):
        """Return timeout for Docker actions."""
        return self.addon.timeout

    @property
    def version(self):
        """Return version of Docker image."""
        if self.addon.legacy:
            return self.addon.version_installed
        return super().version

    @property
    def arch(self):
        """Return arch of Docker image."""
        if self.addon.legacy:
            return self.sys_arch.default
        return super().arch

    @property
    def name(self):
        """Return name of Docker container."""
        return "addon_{}".format(self.addon.slug)

    @property
    def ipc(self):
        """Return the IPC namespace."""
        if self.addon.host_ipc:
            return 'host'
        return None

    @property
    def full_access(self):
        """Return True if full access is enabled."""
        return not self.addon.protected and self.addon.with_full_access

    @property
    def hostname(self):
        """Return slug/id of add-on."""
        return self.addon.slug.replace('_', '-')

    @property
    def environment(self):
        """Return environment for Docker add-on."""
        addon_env = self.addon.environment or {}

        # Provide options for legacy add-ons
        if self.addon.legacy:
            for key, value in self.addon.options.items():
                if isinstance(value, (int, str)):
                    addon_env[key] = value
                else:
                    _LOGGER.warning(
                        "Can not set nested option %s as Docker env", key)

        return {
            **addon_env,
            ENV_TIME: self.sys_timezone,
            ENV_TOKEN: self.addon.hassio_token,
        }

    @property
    def devices(self):
        """Return needed devices."""
        devices = self.addon.devices or []

        # Use audio devices
        if self.addon.with_audio and self.sys_hardware.support_audio:
            devices.append(AUDIO_DEVICE)

        # Auto mapping UART devices
        if self.addon.auto_uart:
            for device in self.sys_hardware.serial_devices:
                devices.append(f"{device}:{device}:rwm")

        # Return None if no device is present
        return devices or None

    @property
    def ports(self):
        """Filter None from add-on ports."""
        if not self.addon.ports:
            return None

        return {
            container_port: host_port
            for container_port, host_port in self.addon.ports.items()
            if host_port
        }

    @property
    def security_opt(self):
        """Control security options."""
        security = []

        # AppArmor
        apparmor = self.sys_host.apparmor.available
        if not apparmor or self.addon.apparmor == SECURITY_DISABLE:
            security.append("apparmor:unconfined")
        elif self.addon.apparmor == SECURITY_PROFILE:
            security.append(f"apparmor={self.addon.slug}")

        # Disable Seccomp. We don't support it officially and it
        # causes trouble on some kinds of host systems.
        security.append("seccomp=unconfined")

        return security

    @property
    def tmpfs(self):
        """Return tmpfs for Docker add-on."""
        options = self.addon.tmpfs
        if options:
            return {"/tmpfs": f"{options}"}
        return None

    @property
    def network_mapping(self):
        """Return hosts mapping."""
        return {
            'homeassistant': self.sys_docker.network.gateway,
            'hassio': self.sys_docker.network.supervisor,
        }

    @property
    def network_mode(self):
        """Return network mode for add-on."""
        if self.addon.host_network:
            return 'host'
        return None

    @property
    def pid_mode(self):
        """Return PID mode for add-on."""
        if not self.addon.protected and self.addon.host_pid:
            return 'host'
        return None

    @property
    def volumes(self):
        """Generate volumes for mappings."""
        volumes = {
            str(self.addon.path_extern_data): {
                'bind': "/data",
                'mode': 'rw'
            }
        }

        addon_mapping = self.addon.map_volumes

        # setup config mappings
        if MAP_CONFIG in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_homeassistant): {
                    'bind': "/config",
                    'mode': addon_mapping[MAP_CONFIG]
                }
            })

        if MAP_SSL in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_ssl): {
                    'bind': "/ssl",
                    'mode': addon_mapping[MAP_SSL]
                }
            })

        if MAP_ADDONS in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_addons_local): {
                    'bind': "/addons",
                    'mode': addon_mapping[MAP_ADDONS]
                }
            })

        if MAP_BACKUP in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_backup): {
                    'bind': "/backup",
                    'mode': addon_mapping[MAP_BACKUP]
                }
            })

        if MAP_SHARE in addon_mapping:
            volumes.update({
                str(self.sys_config.path_extern_share): {
                    'bind': "/share",
                    'mode': addon_mapping[MAP_SHARE]
                }
            })

        # Init other hardware mappings

        # GPIO support
        if self.addon.with_gpio and self.sys_hardware.support_gpio:
            for gpio_path in ("/sys/class/gpio", "/sys/devices/platform/soc"):
                volumes.update({
                    gpio_path: {
                        'bind': gpio_path,
                        'mode': 'rw'
                    },
                })

        # DeviceTree support
        if self.addon.with_devicetree:
            volumes.update({
                "/sys/firmware/devicetree/base": {
                    'bind': "/device-tree",
                    'mode': 'ro'
                },
            })

        # Kernel Modules support
        if self.addon.with_kernel_modules:
            volumes.update({
                "/lib/modules": {
                    'bind': "/lib/modules",
                    'mode': 'ro'
                },
            })

        # Docker API support
        if not self.addon.protected and self.addon.access_docker_api:
            volumes.update({
                "/var/run/docker.sock": {
                    'bind': "/var/run/docker.sock",
                    'mode': 'ro'
                },
            })

        # Host D-Bus system
        if self.addon.host_dbus:
            volumes.update({
                "/var/run/dbus": {
                    'bind': "/var/run/dbus",
                    'mode': 'rw'
                }
            })

        # ALSA configuration
        if self.addon.with_audio:
            volumes.update({
                str(self.addon.path_extern_asound): {
                    'bind': "/etc/asound.conf",
                    'mode': 'ro'
                }
            })

        return volumes

    def _run(self):
        """Run Docker image.

        Need to run inside executor.
        """
        if self._is_running():
            return True

        # Security check
        if not self.addon.protected:
            _LOGGER.warning("%s run with disabled protected mode!",
                            self.addon.name)

        # cleanup
        self._stop()

        ret = self.sys_docker.run(
            self.image,
            name=self.name,
            hostname=self.hostname,
            detach=True,
            init=True,
            privileged=self.full_access,
            ipc_mode=self.ipc,
            stdin_open=self.addon.with_stdin,
            network_mode=self.network_mode,
            pid_mode=self.pid_mode,
            ports=self.ports,
            extra_hosts=self.network_mapping,
            devices=self.devices,
            cap_add=self.addon.privileged,
            security_opt=self.security_opt,
            environment=self.environment,
            volumes=self.volumes,
            tmpfs=self.tmpfs)

        if ret:
            _LOGGER.info("Start Docker add-on %s with version %s", self.image,
                         self.version)

        return ret

    def _install(self, tag, image=None):
        """Pull Docker image or build it.

        Need to run inside executor.
        """
        if self.addon.need_build:
            return self._build(tag)

        return super()._install(tag, image)

    def _build(self, tag):
        """Build a Docker container.

        Need to run inside executor.
        """
        build_env = AddonBuild(self.coresys, self._id)

        _LOGGER.info("Start build %s:%s", self.image, tag)
        try:
            image, log = self.sys_docker.images.build(
                use_config_proxy=False, **build_env.get_docker_args(tag))

            _LOGGER.debug("Build %s:%s done: %s", self.image, tag, log)
            image.tag(self.image, tag='latest')

            # Update meta data
            self._meta = image.attrs

        except docker.errors.DockerException as err:
            _LOGGER.error("Can't build %s:%s: %s", self.image, tag, err)
            return False

        _LOGGER.info("Build %s:%s done", self.image, tag)
        return True

    @process_lock
    def export_image(self, path):
        """Export current image into a tar file."""
        return self.sys_run_in_executor(self._export_image, path)

    def _export_image(self, tar_file):
        """Export current image into a tar file.

        Need to run inside executor.
        """
        try:
            image = self.sys_docker.api.get_image(self.image)
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't fetch image %s: %s", self.image, err)
            return False

        _LOGGER.info("Export image %s to %s", self.image, tar_file)
        try:
            with tar_file.open("wb") as write_tar:
                for chunk in image:
                    write_tar.write(chunk)
        except (OSError, requests.exceptions.ReadTimeout) as err:
            _LOGGER.error("Can't write tar file %s: %s", tar_file, err)
            return False

        _LOGGER.info("Export image %s done", self.image)
        return True

    @process_lock
    def import_image(self, path, tag):
        """Import a tar file as image."""
        return self.sys_run_in_executor(self._import_image, path, tag)

    def _import_image(self, tar_file, tag):
        """Import a tar file as image.

        Need to run inside executor.
        """
        try:
            with tar_file.open("rb") as read_tar:
                self.sys_docker.api.load_image(read_tar, quiet=True)

            image = self.sys_docker.images.get(self.image)
            image.tag(self.image, tag=tag)
        except (docker.errors.DockerException, OSError) as err:
            _LOGGER.error("Can't import image %s: %s", self.image, err)
            return False

        _LOGGER.info("Import image %s and tag %s", tar_file, tag)
        self._meta = image.attrs
        self._cleanup()
        return True

    @process_lock
    def write_stdin(self, data):
        """Write to add-on stdin."""
        return self.sys_run_in_executor(self._write_stdin, data)

    def _write_stdin(self, data):
        """Write to add-on stdin.

        Need to run inside executor.
        """
        if not self._is_running():
            return False

        try:
            # Load needed docker objects
            container = self.sys_docker.containers.get(self.name)
            socket = container.attach_socket(params={'stdin': 1, 'stream': 1})
        except docker.errors.DockerException as err:
            _LOGGER.error("Can't attach to %s stdin: %s", self.name, err)
            return False

        try:
            # Write to stdin
            data += b"\n"
            os.write(socket.fileno(), data)
            socket.close()
        except OSError as err:
            _LOGGER.error("Can't write to %s stdin: %s", self.name, err)
            return False

        return True
@@ -1,37 +0,0 @@
"""HassOS Cli docker object."""
import logging

import docker

from ..coresys import CoreSysAttributes
from .interface import DockerInterface

_LOGGER = logging.getLogger(__name__)


class DockerHassOSCli(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for HassOS Cli."""

    @property
    def image(self):
        """Return name of HassOS CLI image."""
        return f"homeassistant/{self.sys_arch.supervisor}-hassio-cli"

    def _stop(self):
        """No need to stop; the CLI container is not long-running."""
        return True

    def _attach(self):
        """Attach to running Docker container.

        Need to run inside executor.
        """
        try:
            image = self.sys_docker.images.get(self.image)

        except docker.errors.DockerException:
            _LOGGER.warning("Can't find a HassOS CLI %s", self.image)

        else:
            self._meta = image.attrs
            _LOGGER.info("Found HassOS CLI %s with version %s", self.image,
                         self.version)
@@ -1,124 +0,0 @@
"""Init file for Hass.io Docker object."""
import logging

import docker

from .interface import DockerInterface
from ..const import ENV_TOKEN, ENV_TIME, LABEL_MACHINE

_LOGGER = logging.getLogger(__name__)

HASS_DOCKER_NAME = 'homeassistant'


class DockerHomeAssistant(DockerInterface):
    """Docker Hass.io wrapper for Home Assistant."""

    @property
    def machine(self):
        """Return machine of Home Assistant Docker image."""
        if self._meta and LABEL_MACHINE in self._meta['Config']['Labels']:
            return self._meta['Config']['Labels'][LABEL_MACHINE]
        return None

    @property
    def image(self):
        """Return name of Docker image."""
        return self.sys_homeassistant.image

    @property
    def name(self):
        """Return name of Docker container."""
        return HASS_DOCKER_NAME

    @property
    def devices(self):
        """Create list of special devices to map into Docker."""
        devices = []
        for device in self.sys_hardware.serial_devices:
            devices.append(f"{device}:{device}:rwm")
        return devices or None

    def _run(self):
        """Run Docker image.

        Need to run inside executor.
        """
        if self._is_running():
            return False

        # cleanup
        self._stop()

        ret = self.sys_docker.run(
            self.image,
            name=self.name,
            hostname=self.name,
            detach=True,
            privileged=True,
            init=True,
            devices=self.devices,
            network_mode='host',
            environment={
                'HASSIO': self.sys_docker.network.supervisor,
                ENV_TIME: self.sys_timezone,
                ENV_TOKEN: self.sys_homeassistant.hassio_token,
            },
            volumes={
                str(self.sys_config.path_extern_homeassistant):
                    {'bind': '/config', 'mode': 'rw'},
                str(self.sys_config.path_extern_ssl):
                    {'bind': '/ssl', 'mode': 'ro'},
                str(self.sys_config.path_extern_share):
                    {'bind': '/share', 'mode': 'rw'},
            }
        )

        if ret:
            _LOGGER.info("Start homeassistant %s with version %s",
                         self.image, self.version)

        return ret

    def _execute_command(self, command):
        """Create a temporary container and run command.

        Need to run inside executor.
        """
        return self.sys_docker.run_command(
            self.image,
            command,
            privileged=True,
            init=True,
            devices=self.devices,
            detach=True,
            stdout=True,
            stderr=True,
            environment={
                ENV_TIME: self.sys_timezone,
            },
            volumes={
                str(self.sys_config.path_extern_homeassistant):
                    {'bind': '/config', 'mode': 'rw'},
                str(self.sys_config.path_extern_ssl):
                    {'bind': '/ssl', 'mode': 'ro'},
                str(self.sys_config.path_extern_share):
                    {'bind': '/share', 'mode': 'ro'},
            }
        )

    def is_initialize(self):
        """Return True if Docker container exists."""
        return self.sys_run_in_executor(self._is_initialize)

    def _is_initialize(self):
        """Return True if Docker container exists.

        Need to run inside executor.
        """
        try:
            self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        return True
@@ -1,334 +0,0 @@
"""Interface class for Hass.io Docker object."""
import asyncio
from contextlib import suppress
import logging

import docker

from .stats import DockerStats
from ..const import LABEL_VERSION, LABEL_ARCH
from ..coresys import CoreSysAttributes
from ..utils import process_lock

_LOGGER = logging.getLogger(__name__)


class DockerInterface(CoreSysAttributes):
    """Docker Hass.io interface."""

    def __init__(self, coresys):
        """Initialize Docker base wrapper."""
        self.coresys = coresys
        self._meta = None
        self.lock = asyncio.Lock(loop=coresys.loop)

    @property
    def timeout(self):
        """Return timeout for Docker actions."""
        return 30

    @property
    def name(self):
        """Return name of Docker container."""
        return None

    @property
    def meta_config(self):
        """Return meta data of configuration for container/image."""
        if not self._meta:
            return {}
        return self._meta.get('Config', {})

    @property
    def meta_labels(self):
        """Return meta data of labels for container/image."""
        return self.meta_config.get('Labels') or {}

    @property
    def image(self):
        """Return name of Docker image."""
        return self.meta_config.get('Image')

    @property
    def version(self):
        """Return version of Docker image."""
        return self.meta_labels.get(LABEL_VERSION)

    @property
    def arch(self):
        """Return arch of Docker image."""
        return self.meta_labels.get(LABEL_ARCH)

    @property
    def in_progress(self):
        """Return True if a task is in progress."""
        return self.lock.locked()

    @process_lock
    def install(self, tag, image=None):
        """Pull Docker image."""
        return self.sys_run_in_executor(self._install, tag, image)

    def _install(self, tag, image=None):
        """Pull Docker image.

        Need to run inside executor.
        """
        image = image or self.image

        try:
            _LOGGER.info("Pull image %s tag %s.", image, tag)
            docker_image = self.sys_docker.images.pull(f"{image}:{tag}")

            docker_image.tag(image, tag='latest')
            self._meta = docker_image.attrs
        except docker.errors.APIError as err:
            _LOGGER.error("Can't install %s:%s -> %s.", image, tag, err)
            return False

        _LOGGER.info("Tag image %s with version %s as latest", image, tag)
        return True

    def exists(self):
        """Return True if Docker image exists in local repository."""
        return self.sys_run_in_executor(self._exists)

    def _exists(self):
        """Return True if Docker image exists in local repository.

        Need to run inside executor.
        """
        try:
            docker_image = self.sys_docker.images.get(self.image)
            assert f"{self.image}:{self.version}" in docker_image.tags
        except (docker.errors.DockerException, AssertionError):
            return False

        return True

    def is_running(self):
        """Return True if Docker is running.

        Return a Future.
        """
        return self.sys_run_in_executor(self._is_running)

    def _is_running(self):
        """Return True if Docker is running.

        Need to run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
            docker_image = self.sys_docker.images.get(self.image)
        except docker.errors.DockerException:
            return False

        # container is not running
        if docker_container.status != 'running':
            return False

        # we run on an old image, stop and start it
        if docker_container.image.id != docker_image.id:
            return False

        return True

    @process_lock
    def attach(self):
        """Attach to running Docker container."""
        return self.sys_run_in_executor(self._attach)

    def _attach(self):
        """Attach to running Docker container.

        Need to run inside executor.
        """
        try:
            if self.image:
                self._meta = self.sys_docker.images.get(self.image).attrs
            else:
                self._meta = self.sys_docker.containers.get(self.name).attrs
        except docker.errors.DockerException:
            return False

        _LOGGER.info("Attach to image %s with version %s", self.image,
                     self.version)

        return True

    @process_lock
    def run(self):
        """Run Docker image."""
        return self.sys_run_in_executor(self._run)

    def _run(self):
        """Run Docker image.

        Need to run inside executor.
        """
        raise NotImplementedError()

    @process_lock
    def stop(self):
        """Stop/remove Docker container."""
        return self.sys_run_in_executor(self._stop)

    def _stop(self):
        """Stop and remove Docker container.

        Need to run inside executor.
        """
        try:
            docker_container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        if docker_container.status == 'running':
            _LOGGER.info("Stop %s Docker application", self.image)
            with suppress(docker.errors.DockerException):
                docker_container.stop(timeout=self.timeout)

        with suppress(docker.errors.DockerException):
            _LOGGER.info("Clean %s Docker application", self.image)
            docker_container.remove(force=True)

        return True

    @process_lock
    def remove(self):
        """Remove Docker images."""
        return self.sys_run_in_executor(self._remove)

    def _remove(self):
        """Remove Docker images.

        Need to run inside executor.
        """
        # Cleanup container
        self._stop()

        _LOGGER.info("Remove Docker %s with latest and %s", self.image,
                     self.version)

        try:
            with suppress(docker.errors.ImageNotFound):
                self.sys_docker.images.remove(
                    image=f"{self.image}:latest", force=True)

            with suppress(docker.errors.ImageNotFound):
                self.sys_docker.images.remove(
                    image=f"{self.image}:{self.version}", force=True)

        except docker.errors.DockerException as err:
            _LOGGER.warning("Can't remove image %s: %s", self.image, err)
            return False

        self._meta = None
        return True

    @process_lock
    def update(self, tag, image=None):
        """Update a Docker image."""
        return self.sys_run_in_executor(self._update, tag, image)

    def _update(self, tag, image=None):
        """Update a Docker image.

        Need to run inside executor.
        """
        image = image or self.image

        _LOGGER.info("Update Docker %s:%s to %s:%s", self.image, self.version,
                     image, tag)

        # Update docker image
        if not self._install(tag, image):
            return False

        # Stop container & cleanup
        self._stop()
        self._cleanup()

        return True

    def logs(self):
        """Return Docker logs of container.
|
||||
|
||||
Return a Future.
|
||||
"""
|
||||
return self.sys_run_in_executor(self._logs)
|
||||
|
||||
def _logs(self):
|
||||
"""Return Docker logs of container.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
try:
|
||||
docker_container = self.sys_docker.containers.get(self.name)
|
||||
except docker.errors.DockerException:
|
||||
return b""
|
||||
|
||||
try:
|
||||
return docker_container.logs(tail=100, stdout=True, stderr=True)
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.warning("Can't grep logs from %s: %s", self.image, err)
|
||||
|
||||
@process_lock
|
||||
def cleanup(self):
|
||||
"""Check if old version exists and cleanup."""
|
||||
return self.sys_run_in_executor(self._cleanup)
|
||||
|
||||
def _cleanup(self):
|
||||
"""Check if old version exists and cleanup.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
try:
|
||||
latest = self.sys_docker.images.get(self.image)
|
||||
except docker.errors.DockerException:
|
||||
_LOGGER.warning("Can't find %s for cleanup", self.image)
|
||||
return False
|
||||
|
||||
for image in self.sys_docker.images.list(name=self.image):
|
||||
if latest.id == image.id:
|
||||
continue
|
||||
|
||||
with suppress(docker.errors.DockerException):
|
||||
_LOGGER.info("Cleanup Docker images: %s", image.tags)
|
||||
self.sys_docker.images.remove(image.id, force=True)
|
||||
|
||||
return True
|
||||
|
||||
@process_lock
|
||||
def execute_command(self, command):
|
||||
"""Create a temporary container and run command."""
|
||||
return self.sys_run_in_executor(self._execute_command, command)
|
||||
|
||||
def _execute_command(self, command):
|
||||
"""Create a temporary container and run command.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def stats(self):
|
||||
"""Read and return stats from container."""
|
||||
return self.sys_run_in_executor(self._stats)
|
||||
|
||||
def _stats(self):
|
||||
"""Create a temporary container and run command.
|
||||
|
||||
Need run inside executor.
|
||||
"""
|
||||
try:
|
||||
docker_container = self.sys_docker.containers.get(self.name)
|
||||
except docker.errors.DockerException:
|
||||
return None
|
||||
|
||||
try:
|
||||
stats = docker_container.stats(stream=False)
|
||||
return DockerStats(stats)
|
||||
except docker.errors.DockerException as err:
|
||||
_LOGGER.error("Can't read stats from %s: %s", self.name, err)
|
||||
return None
|
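
# A minimal usage sketch: every public method above schedules its blocking
# `_*` twin on the executor, so callers simply await it from the event loop
# while `@process_lock` serializes concurrent tasks. `ExampleDocker` and the
# `coresys` argument are hypothetical stand-ins for illustration.
class ExampleDocker(DockerInterface):
    """Hypothetical concrete wrapper for the sketch."""

    @property
    def name(self):
        """Return a fixed container name for the sketch."""
        return "example"


async def _demo_install(coresys):
    """Pull an image, then update it, from async code."""
    wrapper = ExampleDocker(coresys)
    if not await wrapper.exists():
        await wrapper.install('1.0', image='org/example')
    await wrapper.update('1.1')  # pull new tag, then stop & cleanup old image
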
@@ -1,93 +0,0 @@
"""Internal network manager for Hass.io."""
import logging

import docker

from ..const import DOCKER_NETWORK_MASK, DOCKER_NETWORK, DOCKER_NETWORK_RANGE

_LOGGER = logging.getLogger(__name__)


class DockerNetwork:
    """Internal Hass.io Network.

    This class is not AsyncIO safe!
    """

    def __init__(self, dock):
        """Initialize internal Hass.io network."""
        self.docker = dock
        self.network = self._get_network()

    @property
    def name(self):
        """Return name of network."""
        return DOCKER_NETWORK

    @property
    def containers(self):
        """Return connected containers from network."""
        return self.network.containers

    @property
    def gateway(self):
        """Return gateway of the network."""
        return DOCKER_NETWORK_MASK[1]

    @property
    def supervisor(self):
        """Return supervisor address of the network."""
        return DOCKER_NETWORK_MASK[2]

    def _get_network(self):
        """Get Hass.io network."""
        try:
            return self.docker.networks.get(DOCKER_NETWORK)
        except docker.errors.NotFound:
            _LOGGER.info("Can't find Hass.io network, create new network")

        ipam_pool = docker.types.IPAMPool(
            subnet=str(DOCKER_NETWORK_MASK),
            gateway=str(self.gateway),
            iprange=str(DOCKER_NETWORK_RANGE)
        )

        ipam_config = docker.types.IPAMConfig(pool_configs=[ipam_pool])

        return self.docker.networks.create(
            DOCKER_NETWORK, driver='bridge', ipam=ipam_config,
            enable_ipv6=False, options={
                "com.docker.network.bridge.name": DOCKER_NETWORK,
            })

    def attach_container(self, container, alias=None, ipv4=None):
        """Attach container to Hass.io network.

        Needs to run inside an executor.
        """
        ipv4 = str(ipv4) if ipv4 else None

        try:
            self.network.connect(container, aliases=alias, ipv4_address=ipv4)
        except docker.errors.APIError as err:
            _LOGGER.error("Can't link container to hassio-net: %s", err)
            return False

        self.network.reload()
        return True

    def detach_default_bridge(self, container):
        """Detach container from the default Docker bridge.

        Needs to run inside an executor.
        """
        try:
            default_network = self.docker.networks.get('bridge')
            default_network.disconnect(container)

        except docker.errors.NotFound:
            return

        except docker.errors.APIError as err:
            _LOGGER.warning(
                "Can't disconnect container from default: %s", err)
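
# A worked sketch of the address scheme: DOCKER_NETWORK_MASK is an
# ipaddress.IPv4Network (it is str()-ed into the IPAM subnet above), so
# indexing it yields consecutive host addresses. The /23 subnet below is an
# assumed example value, not necessarily the project's real constant.
import ipaddress

example_mask = ipaddress.ip_network('172.30.32.0/23')
assert str(example_mask[1]) == '172.30.32.1'  # first host -> `gateway`
assert str(example_mask[2]) == '172.30.32.2'  # second host -> `supervisor`
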
@@ -1,42 +0,0 @@
"""Init file for Hass.io Docker object."""
import logging
import os

import docker

from .interface import DockerInterface
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)


class DockerSupervisor(DockerInterface, CoreSysAttributes):
    """Docker Hass.io wrapper for Supervisor."""

    @property
    def name(self):
        """Return name of Docker container."""
        return os.environ['SUPERVISOR_NAME']

    def _attach(self):
        """Attach to running Docker container.

        Needs to run inside an executor.
        """
        try:
            container = self.sys_docker.containers.get(self.name)
        except docker.errors.DockerException:
            return False

        self._meta = container.attrs
        _LOGGER.info("Attach to Supervisor %s with version %s",
                     self.image, self.version)

        # If already attached
        if container in self.sys_docker.network.containers:
            return True

        # Attach to network
        return self.sys_docker.network.attach_container(
            container, alias=['hassio'],
            ipv4=self.sys_docker.network.supervisor)
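
# Note: DockerSupervisor never starts its own container; the host launcher
# starts it and passes the container name in the environment, along the
# lines of this hypothetical invocation:
#
#   docker run -e SUPERVISOR_NAME=hassio_supervisor ... <supervisor-image>
#
# `_attach()` then only adopts that already-running container and joins it
# to the internal network at the fixed `supervisor` address shown above.
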
@@ -1,498 +0,0 @@
"""Home Assistant control object."""
import asyncio
from contextlib import asynccontextmanager, suppress
from datetime import datetime, timedelta
import logging
import os
import re
from pathlib import Path
import socket
import time

import aiohttp
from aiohttp import hdrs
import attr

from .const import (FILE_HASSIO_HOMEASSISTANT, ATTR_IMAGE, ATTR_LAST_VERSION,
                    ATTR_UUID, ATTR_BOOT, ATTR_PASSWORD, ATTR_PORT, ATTR_SSL,
                    ATTR_WATCHDOG, ATTR_WAIT_BOOT, ATTR_REFRESH_TOKEN,
                    ATTR_ACCESS_TOKEN, HEADER_HA_ACCESS)
from .coresys import CoreSysAttributes
from .docker.homeassistant import DockerHomeAssistant
from .exceptions import (HomeAssistantUpdateError, HomeAssistantError,
                         HomeAssistantAPIError, HomeAssistantAuthError)
from .utils import convert_to_ascii, process_lock, create_token
from .utils.json import JsonConfig
from .validate import SCHEMA_HASS_CONFIG

_LOGGER = logging.getLogger(__name__)

RE_YAML_ERROR = re.compile(r"homeassistant\.util\.yaml")


@attr.s(frozen=True)
class ConfigResult:
    """Return object from config check."""
    valid = attr.ib()
    log = attr.ib()


class HomeAssistant(JsonConfig, CoreSysAttributes):
    """Home Assistant core object."""

    def __init__(self, coresys):
        """Initialize Home Assistant object."""
        super().__init__(FILE_HASSIO_HOMEASSISTANT, SCHEMA_HASS_CONFIG)
        self.coresys = coresys
        self.instance = DockerHomeAssistant(coresys)
        self.lock = asyncio.Lock(loop=coresys.loop)
        self._error_state = False
        # We don't persist access tokens. Instead we fetch new ones when needed
        self.access_token = None
        self._access_token_expires = None

    async def load(self):
        """Prepare Home Assistant object."""
        if await self.instance.attach():
            return

        _LOGGER.info("No Home Assistant Docker image %s found.", self.image)
        await self.install_landingpage()

    @property
    def machine(self):
        """Return the system machine."""
        return self.instance.machine

    @property
    def arch(self):
        """Return arch of running Home Assistant."""
        return self.instance.arch

    @property
    def error_state(self):
        """Return True if system is in error."""
        return self._error_state

    @property
    def api_ip(self):
        """Return IP of Home Assistant instance."""
        return self.sys_docker.network.gateway

    @property
    def api_port(self):
        """Return network port of Home Assistant instance."""
        return self._data[ATTR_PORT]

    @api_port.setter
    def api_port(self, value):
        """Set network port for Home Assistant instance."""
        self._data[ATTR_PORT] = value

    @property
    def api_password(self):
        """Return password for Home Assistant instance."""
        return self._data.get(ATTR_PASSWORD)

    @api_password.setter
    def api_password(self, value):
        """Set password for Home Assistant instance."""
        self._data[ATTR_PASSWORD] = value

    @property
    def api_ssl(self):
        """Return True if SSL is needed to reach the Home Assistant instance."""
        return self._data[ATTR_SSL]

    @api_ssl.setter
    def api_ssl(self, value):
        """Set SSL for Home Assistant instance."""
        self._data[ATTR_SSL] = value

    @property
    def api_url(self):
        """Return API URL of Home Assistant."""
        return "{}://{}:{}".format('https' if self.api_ssl else 'http',
                                   self.api_ip, self.api_port)

    @property
    def watchdog(self):
        """Return True if the watchdog should protect Home Assistant."""
        return self._data[ATTR_WATCHDOG]

    @watchdog.setter
    def watchdog(self, value):
        """Set if the watchdog should protect Home Assistant."""
        self._data[ATTR_WATCHDOG] = value

    @property
    def wait_boot(self):
        """Return time to wait for Home Assistant startup."""
        return self._data[ATTR_WAIT_BOOT]

    @wait_boot.setter
    def wait_boot(self, value):
        """Set time to wait for Home Assistant startup."""
        self._data[ATTR_WAIT_BOOT] = value

    @property
    def version(self):
        """Return version of running Home Assistant."""
        return self.instance.version

    @property
    def last_version(self):
        """Return last available version of Home Assistant."""
        if self.is_custom_image:
            return self._data.get(ATTR_LAST_VERSION)
        return self.sys_updater.version_homeassistant

    @last_version.setter
    def last_version(self, value):
        """Set last available version of Home Assistant."""
        if value:
            self._data[ATTR_LAST_VERSION] = value
        else:
            self._data.pop(ATTR_LAST_VERSION, None)

    @property
    def image(self):
        """Return image name of the Home Assistant container."""
        if self._data.get(ATTR_IMAGE):
            return self._data[ATTR_IMAGE]
        return os.environ['HOMEASSISTANT_REPOSITORY']

    @image.setter
    def image(self, value):
        """Set image name of Home Assistant container."""
        if value:
            self._data[ATTR_IMAGE] = value
        else:
            self._data.pop(ATTR_IMAGE, None)

    @property
    def is_custom_image(self):
        """Return True if a custom image is used."""
        return all(
            attr in self._data for attr in (ATTR_IMAGE, ATTR_LAST_VERSION))

    @property
    def boot(self):
        """Return True if Home Assistant boot is enabled."""
        return self._data[ATTR_BOOT]

    @boot.setter
    def boot(self, value):
        """Set Home Assistant boot options."""
        self._data[ATTR_BOOT] = value

    @property
    def uuid(self):
        """Return a UUID of this Home Assistant instance."""
        return self._data[ATTR_UUID]

    @property
    def hassio_token(self):
        """Return an access token for the Hass.io API."""
        return self._data.get(ATTR_ACCESS_TOKEN)

    @property
    def refresh_token(self):
        """Return the refresh token to authenticate with Home Assistant."""
        return self._data.get(ATTR_REFRESH_TOKEN)

    @refresh_token.setter
    def refresh_token(self, value):
        """Set Home Assistant refresh_token."""
        self._data[ATTR_REFRESH_TOKEN] = value

    @process_lock
    async def install_landingpage(self):
        """Install a landing page."""
        _LOGGER.info("Setup Home Assistant landingpage")
        while True:
            if await self.instance.install('landingpage'):
                break
            _LOGGER.warning("Failed to install landingpage, retry in 30sec")
            await asyncio.sleep(30)

    @process_lock
    async def install(self):
        """Install a Home Assistant instance."""
        _LOGGER.info("Setup Home Assistant")
        while True:
            # read homeassistant tag and install it
            if not self.last_version:
                await self.sys_updater.reload()

            tag = self.last_version
            if tag and await self.instance.install(tag):
                break
            _LOGGER.warning("Error installing Home Assistant. Retry in 30sec")
            await asyncio.sleep(30)

        # finishing
        _LOGGER.info("Home Assistant Docker image is now installed")
        try:
            if not self.boot:
                return
            _LOGGER.info("Start Home Assistant")
            await self._start()
        except HomeAssistantError:
            _LOGGER.error("Can't start Home Assistant!")
        finally:
            await self.instance.cleanup()

    @process_lock
    async def update(self, version=None):
        """Update Home Assistant version."""
        version = version or self.last_version
        rollback = self.version if not self.error_state else None
        running = await self.instance.is_running()
        exists = await self.instance.exists()

        if exists and version == self.instance.version:
            _LOGGER.warning("Version %s is already installed", version)
            raise HomeAssistantUpdateError()

        # process an update
        async def _update(to_version):
            """Run Home Assistant update."""
            try:
                _LOGGER.info("Update Home Assistant to version %s", to_version)
                if not await self.instance.update(to_version):
                    raise HomeAssistantUpdateError()
            finally:
                if running:
                    await self._start()

            _LOGGER.info("Successfully started Home Assistant %s", to_version)

        # Update Home Assistant
        with suppress(HomeAssistantError):
            await _update(version)
            return

        # Update went wrong, revert it
        if self.error_state and rollback:
            _LOGGER.fatal("Home Assistant update failed -> rollback!")
            await _update(rollback)
        else:
            raise HomeAssistantUpdateError()

    async def _start(self):
        """Start Home Assistant Docker & wait."""
        if await self.instance.is_running():
            _LOGGER.warning("Home Assistant is already running!")
            return

        # Create new API token
        self._data[ATTR_ACCESS_TOKEN] = create_token()
        self.save_data()

        if not await self.instance.run():
            raise HomeAssistantError()
        await self._block_till_run()

    @process_lock
    def start(self):
        """Run Home Assistant Docker.

        Return a coroutine.
        """
        return self._start()

    @process_lock
    def stop(self):
        """Stop Home Assistant Docker.

        Return a coroutine.
        """
        return self.instance.stop()

    @process_lock
    async def restart(self):
        """Restart Home Assistant Docker."""
        await self.instance.stop()
        await self._start()

    def logs(self):
        """Get Home Assistant Docker logs.

        Return a coroutine.
        """
        return self.instance.logs()

    def stats(self):
        """Return stats of Home Assistant.

        Return a coroutine.
        """
        return self.instance.stats()

    def is_running(self):
        """Return True if the Docker container is running.

        Return a coroutine.
        """
        return self.instance.is_running()

    def is_initialize(self):
        """Return True if a Docker container exists.

        Return a coroutine.
        """
        return self.instance.is_initialize()

    @property
    def in_progress(self):
        """Return True if a task is in progress."""
        return self.instance.in_progress or self.lock.locked()

    async def check_config(self):
        """Run Home Assistant config check."""
        result = await self.instance.execute_command(
            "python3 -m homeassistant -c /config --script check_config")

        # if not valid
        if result.exit_code is None:
            _LOGGER.error("Fatal error on config check!")
            raise HomeAssistantError()

        # parse output
        log = convert_to_ascii(result.output)
        if result.exit_code != 0 or RE_YAML_ERROR.search(log):
            _LOGGER.error("Invalid Home Assistant config found!")
            return ConfigResult(False, log)

        _LOGGER.info("Home Assistant config is valid")
        return ConfigResult(True, log)

    async def ensure_access_token(self):
        """Ensure there is a valid access token."""
        if (self.access_token is not None and
                self._access_token_expires > datetime.utcnow()):
            return

        with suppress(asyncio.TimeoutError, aiohttp.ClientError):
            async with self.sys_websession_ssl.post(
                    f"{self.api_url}/auth/token",
                    timeout=30,
                    data={
                        "grant_type": "refresh_token",
                        "refresh_token": self.refresh_token
                    }) as resp:
                if resp.status != 200:
                    _LOGGER.error("Can't update Home Assistant access token!")
                    raise HomeAssistantAuthError()

                _LOGGER.info("Updated Home Assistant API token")
                tokens = await resp.json()
                self.access_token = tokens['access_token']
                self._access_token_expires = \
                    datetime.utcnow() + timedelta(seconds=tokens['expires_in'])

    @asynccontextmanager
    async def make_request(self,
                           method,
                           path,
                           json=None,
                           content_type=None,
                           data=None,
                           timeout=30):
        """Async context manager to make a request with the right auth."""
        url = f"{self.api_url}/{path}"
        headers = {}

        # Passthrough content type
        if content_type is not None:
            headers[hdrs.CONTENT_TYPE] = content_type

        # Set old API Password
        if self.api_password:
            headers[HEADER_HA_ACCESS] = self.api_password

        for _ in (1, 2):
            # Prepare Access token
            if self.refresh_token:
                await self.ensure_access_token()
                headers[hdrs.AUTHORIZATION] = f'Bearer {self.access_token}'

            try:
                async with getattr(self.sys_websession_ssl, method)(
                        url, data=data, timeout=timeout, json=json,
                        headers=headers) as resp:
                    # Access token expired
                    if resp.status == 401 and self.refresh_token:
                        self.access_token = None
                        continue
                    yield resp
                    return
            except (asyncio.TimeoutError, aiohttp.ClientError) as err:
                _LOGGER.error("Error on call %s: %s", url, err)
                break

        raise HomeAssistantAPIError()
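
    # A usage sketch for make_request: it is an async context manager, so
    # the response stays open only inside the `async with` block, and the
    # loop above retries once after clearing an expired access token.
    # `homeassistant` below stands for a HomeAssistant instance; 'api/config'
    # is one example endpoint.
    #
    #     async def fetch_config(homeassistant):
    #         async with homeassistant.make_request('get', 'api/config') as resp:
    #             if resp.status == 200:
    #                 return await resp.json()
    #             return None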

    async def check_api_state(self):
        """Return True if Home Assistant is up and running."""
        with suppress(HomeAssistantAPIError):
            async with self.make_request('get', 'api/') as resp:
                if resp.status in (200, 201):
                    return True
                status = resp.status
                _LOGGER.warning("Home Assistant API config mismatch: %s",
                                status)

        return False

    async def _block_till_run(self):
        """Block until Home Assistant is booted up or the startup timeout hits."""
        start_time = time.monotonic()
        migration_progress = False
        migration_file = Path(self.sys_config.path_homeassistant,
                              '.migration_progress')

        def check_port():
            """Check if port is mapped."""
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            try:
                result = sock.connect_ex((str(self.api_ip), self.api_port))
                sock.close()

                # Check if the port is available
                if result == 0:
                    return True
            except OSError:
                pass
            return False

        while True:
            await asyncio.sleep(5)

            # 1: Check if container is running
            if not await self.instance.is_running():
                _LOGGER.error("Home Assistant has crashed!")
                break

            # 2: Check if the API responds
            if await self.sys_run_in_executor(check_port):
                _LOGGER.info("Detected a running Home Assistant instance")
                self._error_state = False
                return

            # 3: Running DB migration
            if migration_file.exists():
                if not migration_progress:
                    migration_progress = True
                    _LOGGER.info("Home Assistant record migration in progress")
                continue
            elif migration_progress:
                migration_progress = False  # Reset start time
                start_time = time.monotonic()
                _LOGGER.info("Home Assistant record migration done")

            # 4: Timeout
            if time.monotonic() - start_time > self.wait_boot:
                _LOGGER.warning(
                    "Not waiting any longer for Home Assistant startup!")
                break

        self._error_state = True
        raise HomeAssistantError()
@@ -1,93 +0,0 @@
"""Host functions like audio, D-Bus or systemd."""
from contextlib import suppress
import logging

from .alsa import AlsaAudio
from .apparmor import AppArmorControl
from .control import SystemControl
from .info import InfoCenter
from .services import ServiceManager
from ..const import (
    FEATURES_REBOOT, FEATURES_SHUTDOWN, FEATURES_HOSTNAME, FEATURES_SERVICES,
    FEATURES_HASSOS)
from ..coresys import CoreSysAttributes
from ..exceptions import HassioError

_LOGGER = logging.getLogger(__name__)


class HostManager(CoreSysAttributes):
    """Manage supported functions from host."""

    def __init__(self, coresys):
        """Initialize Host manager."""
        self.coresys = coresys
        self._alsa = AlsaAudio(coresys)
        self._apparmor = AppArmorControl(coresys)
        self._control = SystemControl(coresys)
        self._info = InfoCenter(coresys)
        self._services = ServiceManager(coresys)

    @property
    def alsa(self):
        """Return host ALSA handler."""
        return self._alsa

    @property
    def apparmor(self):
        """Return host AppArmor handler."""
        return self._apparmor

    @property
    def control(self):
        """Return host control handler."""
        return self._control

    @property
    def info(self):
        """Return host info handler."""
        return self._info

    @property
    def services(self):
        """Return host services handler."""
        return self._services

    @property
    def supperted_features(self):
        """Return a list of supported host features."""
        features = []

        if self.sys_dbus.systemd.is_connected:
            features.extend([
                FEATURES_REBOOT,
                FEATURES_SHUTDOWN,
                FEATURES_SERVICES,
            ])

        if self.sys_dbus.hostname.is_connected:
            features.append(FEATURES_HOSTNAME)

        if self.sys_hassos.available:
            features.append(FEATURES_HASSOS)

        return features

    async def reload(self):
        """Reload host functions."""
        if self.sys_dbus.hostname.is_connected:
            await self.info.update()

        if self.sys_dbus.systemd.is_connected:
            await self.services.update()

    async def load(self):
        """Load host information."""
        with suppress(HassioError):
            await self.reload()

        # Load profile data
        try:
            await self.apparmor.load()
        except HassioError as err:
            _LOGGER.warning("Loading host AppArmor on start failed: %s", err)
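
# A minimal lifecycle sketch, assuming a populated `coresys`: `load()` runs
# once at startup and tolerates an unreachable D-Bus via suppress(HassioError);
# `reload()` refreshes hostname info and the systemd service list on demand.
async def _demo_host(coresys):
    """Hypothetical startup flow for illustration."""
    host = HostManager(coresys)
    await host.load()
    features = host.supperted_features  # list built from FEATURES_* constants
    await host.reload()
    return features
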
@@ -1,140 +0,0 @@
"""Host Audio support."""
import logging
import json
from pathlib import Path
from string import Template

import attr

from ..const import (
    ATTR_INPUT, ATTR_OUTPUT, ATTR_DEVICES, ATTR_NAME, CHAN_ID, CHAN_TYPE)
from ..coresys import CoreSysAttributes

_LOGGER = logging.getLogger(__name__)

# pylint: disable=invalid-name
DefaultConfig = attr.make_class('DefaultConfig', ['input', 'output'])


class AlsaAudio(CoreSysAttributes):
    """Handle Audio ALSA host data."""

    def __init__(self, coresys):
        """Initialize ALSA audio system."""
        self.coresys = coresys
        self._data = {
            ATTR_INPUT: {},
            ATTR_OUTPUT: {},
        }
        self._cache = 0
        self._default = None

    @property
    def input_devices(self):
        """Return list of ALSA input devices."""
        self._update_device()
        return self._data[ATTR_INPUT]

    @property
    def output_devices(self):
        """Return list of ALSA output devices."""
        self._update_device()
        return self._data[ATTR_OUTPUT]

    def _update_device(self):
        """Update internal device DB."""
        current_id = hash(frozenset(self.sys_hardware.audio_devices))

        # Need rebuild?
        if current_id == self._cache:
            return

        # Clean old stuff
        self._data[ATTR_INPUT].clear()
        self._data[ATTR_OUTPUT].clear()

        # Init database
        _LOGGER.info("Update ALSA device list")
        database = self._audio_database()

        # Process devices
        for dev_id, dev_data in self.sys_hardware.audio_devices.items():
            for chan_info in dev_data[ATTR_DEVICES]:
                chan_id = chan_info[CHAN_ID]
                chan_type = chan_info[CHAN_TYPE]
                alsa_id = f"{dev_id},{chan_id}"
                dev_name = dev_data[ATTR_NAME]

                # Lookup type
                if chan_type.endswith('playback'):
                    key = ATTR_OUTPUT
                elif chan_type.endswith('capture'):
                    key = ATTR_INPUT
                else:
                    _LOGGER.warning("Unknown channel type: %s", chan_type)
                    continue

                # Use name from DB or a generic name
                self._data[key][alsa_id] = database.get(
                    self.sys_machine, {}).get(
                        dev_name, {}).get(alsa_id, f"{dev_name}: {chan_id}")

        self._cache = current_id

    @staticmethod
    def _audio_database():
        """Read local JSON audio data into a dict."""
        json_file = Path(__file__).parent.joinpath("data/audiodb.json")

        try:
            # pylint: disable=no-member
            with json_file.open('r') as database:
                return json.loads(database.read())
        except (ValueError, OSError) as err:
            _LOGGER.warning("Can't read audio DB: %s", err)

        return {}

    @property
    def default(self):
        """Generate ALSA default setting."""
        # Init defaults
        if self._default is None:
            database = self._audio_database()
            alsa_input = database.get(self.sys_machine, {}).get(ATTR_INPUT)
            alsa_output = database.get(self.sys_machine, {}).get(ATTR_OUTPUT)

            self._default = DefaultConfig(alsa_input, alsa_output)

        # Search for an existing/new output device
        if self._default.output is None and self.output_devices:
            self._default.output = next(iter(self.output_devices))
            _LOGGER.info("Detected output device %s", self._default.output)

        # Search for an existing/new input device
        if self._default.input is None and self.input_devices:
            self._default.input = next(iter(self.input_devices))
            _LOGGER.info("Detected input device %s", self._default.input)

        return self._default

    def asound(self, alsa_input=None, alsa_output=None):
        """Generate asound configuration data."""
        alsa_input = alsa_input or self.default.input
        alsa_output = alsa_output or self.default.output

        # Read Template
        asound_file = Path(__file__).parent.joinpath("data/asound.tmpl")
        try:
            # pylint: disable=no-member
            with asound_file.open('r') as asound:
                asound_data = asound.read()
        except OSError as err:
            _LOGGER.error("Can't read asound.tmpl: %s", err)
            return ""

        # Process Template
        asound_template = Template(asound_data)
        return asound_template.safe_substitute(
            input=alsa_input, output=alsa_output
        )
@@ -1,17 +0,0 @@
pcm.!default {
    type asym
    capture.pcm "mic"
    playback.pcm "speaker"
}
pcm.mic {
    type plug
    slave {
        pcm "hw:$input"
    }
}
pcm.speaker {
    type plug
    slave {
        pcm "hw:$output"
    }
}
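
# A worked example of what AlsaAudio.asound() does with the template above:
# string.Template swaps $input/$output for ALSA "card,device" ids, and
# safe_substitute leaves any unknown placeholder untouched. The ids below
# are sample values.
from string import Template

tmpl = Template('pcm "hw:$input"\npcm "hw:$output"')
print(tmpl.safe_substitute(input='0,0', output='0,1'))
# pcm "hw:0,0"
# pcm "hw:0,1"
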
@@ -1,18 +0,0 @@
{
    "raspberrypi3": {
        "bcm2835 - bcm2835 ALSA": {
            "0,0": "Raspberry Jack",
            "0,1": "Raspberry HDMI"
        },
        "output": "0,0",
        "input": null
    },
    "raspberrypi2": {
        "output": "0,0",
        "input": null
    },
    "raspberrypi": {
        "output": "0,0",
        "input": null
    }
}
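
# A lookup sketch mirroring AlsaAudio._update_device: machine -> device name
# -> "card,device" id -> display name, with a generic fallback when any level
# is missing. The data below repeats the raspberrypi3 entry above.
database = {
    "raspberrypi3": {
        "bcm2835 - bcm2835 ALSA": {"0,0": "Raspberry Jack"},
    },
}
dev_name = "bcm2835 - bcm2835 ALSA"
name = database.get("raspberrypi3", {}).get(dev_name, {}).get(
    "0,0", f"{dev_name}: 0")
assert name == "Raspberry Jack"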