mirror of
https://github.com/home-assistant/core.git
synced 2025-11-03 07:59:30 +00:00
Compare commits
1281 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ad341e2152 | ||
|
|
d64730a3cf | ||
|
|
a8c4fc33f6 | ||
|
|
476a727df3 | ||
|
|
1d5709f49f | ||
|
|
725d5c636e | ||
|
|
414b85c253 | ||
|
|
56ca0edaa7 | ||
|
|
7168dd6cec | ||
|
|
d3f6c43bbd | ||
|
|
59b42b4236 | ||
|
|
207cf18a46 | ||
|
|
5961fbb710 | ||
|
|
37d78af42c | ||
|
|
05ecc5a135 | ||
|
|
dcdbd08d23 | ||
|
|
11a4d36c69 | ||
|
|
a4920d3afb | ||
|
|
6b2d40327c | ||
|
|
620cb74050 | ||
|
|
93c0db2328 | ||
|
|
0ccffc3e55 | ||
|
|
4de97abc3a | ||
|
|
da05dfe708 | ||
|
|
0490167a12 | ||
|
|
3cf8964c06 | ||
|
|
671cb0d092 | ||
|
|
fcdd66b33b | ||
|
|
4bef2412d2 | ||
|
|
e1d884a484 | ||
|
|
5e7465a261 | ||
|
|
92991b53c4 | ||
|
|
11ebd8546c | ||
|
|
16a98359c3 | ||
|
|
8ffc6c05b7 | ||
|
|
3a3f70ef21 | ||
|
|
90dc81c1b3 | ||
|
|
3d11c45edd | ||
|
|
35c048fe6b | ||
|
|
1c0d847353 | ||
|
|
96e84692ef | ||
|
|
2e05431642 | ||
|
|
255332aca8 | ||
|
|
42c50c5b5e | ||
|
|
1e8a4dd0bc | ||
|
|
ffe6ddeba7 | ||
|
|
39b8102ce6 | ||
|
|
fe1e761a7a | ||
|
|
0257fe0375 | ||
|
|
f8bb0e1229 | ||
|
|
5aa35b52cc | ||
|
|
2c144bc412 | ||
|
|
2d10e61c23 | ||
|
|
15ae970941 | ||
|
|
67cae00caa | ||
|
|
35900964cb | ||
|
|
71acc6d3f8 | ||
|
|
891f19b43f | ||
|
|
3a91c8f285 | ||
|
|
c4f673c894 | ||
|
|
dc722adbb5 | ||
|
|
2e300aec5a | ||
|
|
b87b29dff6 | ||
|
|
65a29e3371 | ||
|
|
7e6d47d64a | ||
|
|
a90ec88e5c | ||
|
|
cc74b22ce8 | ||
|
|
f379bb4016 | ||
|
|
1f9f201571 | ||
|
|
03052802a4 | ||
|
|
1aae84173a | ||
|
|
bc38d394d5 | ||
|
|
e225243bc5 | ||
|
|
7ceedd15b3 | ||
|
|
14c3b38461 | ||
|
|
c6c4c07f2d | ||
|
|
a14c299a78 | ||
|
|
4936e55979 | ||
|
|
da53e0a836 | ||
|
|
3672a5f881 | ||
|
|
0be0353eed | ||
|
|
4d7fd8ae17 | ||
|
|
1f09967abb | ||
|
|
7d68def303 | ||
|
|
9efb759a98 | ||
|
|
0a6d49b293 | ||
|
|
297cd3dc13 | ||
|
|
0df1bb5029 | ||
|
|
1c3e5988db | ||
|
|
230ca9b89d | ||
|
|
3c7be11c31 | ||
|
|
668deeb7bd | ||
|
|
8c61808ce4 | ||
|
|
36f3940c85 | ||
|
|
fc36927468 | ||
|
|
0fc2813177 | ||
|
|
a17e28cc78 | ||
|
|
2c8c8009ff | ||
|
|
2087358d58 | ||
|
|
3512d05467 | ||
|
|
4f8a93fb3e | ||
|
|
47f3be1fe4 | ||
|
|
e79af97fdc | ||
|
|
46b9e9cdfb | ||
|
|
94f5b262be | ||
|
|
ea5d3ce85a | ||
|
|
b6934f0cd0 | ||
|
|
59c62a261b | ||
|
|
fae3546910 | ||
|
|
b230562c76 | ||
|
|
a50f1ae614 | ||
|
|
e8e84fb764 | ||
|
|
10b120f11f | ||
|
|
408af6e842 | ||
|
|
cd0277c2c3 | ||
|
|
00a5a5f3c0 | ||
|
|
18ba2f986e | ||
|
|
b0e4260562 | ||
|
|
f799bbf2a7 | ||
|
|
9e36448f03 | ||
|
|
561bbecd25 | ||
|
|
df51c07a88 | ||
|
|
fb9ca0d4da | ||
|
|
f07c714c01 | ||
|
|
8a2fdb5045 | ||
|
|
f1e4153b2c | ||
|
|
c886d00bab | ||
|
|
065a5c5df6 | ||
|
|
e2e7d39527 | ||
|
|
9fc4b878e2 | ||
|
|
638f5b1932 | ||
|
|
956cdba588 | ||
|
|
f11f0956d3 | ||
|
|
1c861b9732 | ||
|
|
86d1bb651e | ||
|
|
828f76ca57 | ||
|
|
f5d0f36caf | ||
|
|
4fb1937f65 | ||
|
|
e4b4551b35 | ||
|
|
5e2dfb14fb | ||
|
|
4c067ecff7 | ||
|
|
2fb03106ea | ||
|
|
a8ec826ef7 | ||
|
|
738d00fb05 | ||
|
|
5ce6ea2df5 | ||
|
|
2850f9d19e | ||
|
|
20ed07cc5c | ||
|
|
2354108e6f | ||
|
|
a5c2a80db3 | ||
|
|
bd2b107575 | ||
|
|
c92f287c73 | ||
|
|
3af77eb594 | ||
|
|
8e4a234bbf | ||
|
|
ee7ec5f234 | ||
|
|
80051b7fc2 | ||
|
|
ca989cba44 | ||
|
|
aa062176ca | ||
|
|
a1bccb1934 | ||
|
|
9470829978 | ||
|
|
b71cb73c80 | ||
|
|
0caab133e6 | ||
|
|
e6445a602b | ||
|
|
8f2de2bf1b | ||
|
|
7cf0684aa1 | ||
|
|
5e805768aa | ||
|
|
8c69fd91ff | ||
|
|
58f946e452 | ||
|
|
bf37cc8371 | ||
|
|
11c74cd0d7 | ||
|
|
797196dce9 | ||
|
|
81bde77c04 | ||
|
|
0be2dad651 | ||
|
|
a652a4d9e9 | ||
|
|
2189cb0ee7 | ||
|
|
15064e83b4 | ||
|
|
8538e69e28 | ||
|
|
35c719628d | ||
|
|
0eab89c8f4 | ||
|
|
a3043b9a90 | ||
|
|
c795c93034 | ||
|
|
2228a0dcac | ||
|
|
68e7f4ca5a | ||
|
|
e052bcb03b | ||
|
|
a56b604936 | ||
|
|
f6b6818fb0 | ||
|
|
93a65bf507 | ||
|
|
ec302912a3 | ||
|
|
50d4921d0a | ||
|
|
17d754dbbf | ||
|
|
5e29d4d098 | ||
|
|
97a13bdcd4 | ||
|
|
7f5607c918 | ||
|
|
d7bd7f2c4c | ||
|
|
b6ba24de5d | ||
|
|
7a8130cd2b | ||
|
|
d64f1e767c | ||
|
|
0653f57fb4 | ||
|
|
615af773e5 | ||
|
|
aa27e22b17 | ||
|
|
9c51650ea3 | ||
|
|
95223cb9ea | ||
|
|
ba04ff17b2 | ||
|
|
b0b2b0d654 | ||
|
|
01430262cd | ||
|
|
83581be4d5 | ||
|
|
fc5b1c7005 | ||
|
|
56e4a2aea6 | ||
|
|
7ea27c0f2a | ||
|
|
258dc80fbd | ||
|
|
22d9a73e8e | ||
|
|
1fddf47e8f | ||
|
|
0da0dda39c | ||
|
|
48540fc21e | ||
|
|
693fa15924 | ||
|
|
c8abbf6d76 | ||
|
|
979f801488 | ||
|
|
caa7a3a3d6 | ||
|
|
290d32267e | ||
|
|
a6e3cc6617 | ||
|
|
b4481269ec | ||
|
|
752d0deb97 | ||
|
|
662c33af85 | ||
|
|
fc384ca6d5 | ||
|
|
49e2583b08 | ||
|
|
8629b86186 | ||
|
|
68c4e5c0c9 | ||
|
|
c4d1cd0e03 | ||
|
|
8b020ea5e6 | ||
|
|
fd2d6c8a74 | ||
|
|
5552a5be70 | ||
|
|
b9c6758dba | ||
|
|
5015311d6b | ||
|
|
1eb66f3657 | ||
|
|
bc7e1a3797 | ||
|
|
003f7865a9 | ||
|
|
33cba4da85 | ||
|
|
1c6d55e51b | ||
|
|
3fd138bbdd | ||
|
|
3db106c562 | ||
|
|
34c3d1ce47 | ||
|
|
cc595632bd | ||
|
|
f76700567e | ||
|
|
86cf02739b | ||
|
|
46cdbd273a | ||
|
|
ec3cb11e2f | ||
|
|
2016cf872e | ||
|
|
37810e010a | ||
|
|
2b69904b94 | ||
|
|
59cf6a0c79 | ||
|
|
39b249d202 | ||
|
|
d57cf01cf2 | ||
|
|
997187c7d3 | ||
|
|
217da36c86 | ||
|
|
be56851feb | ||
|
|
53954d6f8f | ||
|
|
5c53257c23 | ||
|
|
c7ebd109b8 | ||
|
|
32e89dcbb6 | ||
|
|
93970b5621 | ||
|
|
cfc2c58fe0 | ||
|
|
516bab9969 | ||
|
|
89ed26eb86 | ||
|
|
e13e4376f8 | ||
|
|
75ad5f8c9e | ||
|
|
2accd8ed1c | ||
|
|
d9e4050cdf | ||
|
|
bbf1ee4c68 | ||
|
|
70cab201db | ||
|
|
9a79a0aa90 | ||
|
|
7089188fd5 | ||
|
|
ccc4f628f1 | ||
|
|
90231c5e07 | ||
|
|
5b24e46a29 | ||
|
|
1215398aef | ||
|
|
9550a38f22 | ||
|
|
4e20e4964e | ||
|
|
ff5dd0cf42 | ||
|
|
5d7f420821 | ||
|
|
a5012f39da | ||
|
|
e4bb955498 | ||
|
|
8f7767d5e5 | ||
|
|
74d0e65958 | ||
|
|
3cfbbdc720 | ||
|
|
3d5c773670 | ||
|
|
b5b0f56ae7 | ||
|
|
c03d5f1a73 | ||
|
|
5abe4dd1f7 | ||
|
|
b507822280 | ||
|
|
ded9eb89bb | ||
|
|
bc4f91a89a | ||
|
|
971223de19 | ||
|
|
60ca8b95a4 | ||
|
|
a012c61762 | ||
|
|
21f68b80ea | ||
|
|
8bae7a45a5 | ||
|
|
2bac24fbb7 | ||
|
|
ac91423d71 | ||
|
|
56841da2d3 | ||
|
|
026dbffa77 | ||
|
|
e74fc9836d | ||
|
|
c7dfec702d | ||
|
|
0f8f9db319 | ||
|
|
366ad8202a | ||
|
|
20301ae888 | ||
|
|
de3d28d9d5 | ||
|
|
b52848d376 | ||
|
|
9c2625f0a5 | ||
|
|
4afc19ff3a | ||
|
|
91d065314c | ||
|
|
8a6515936d | ||
|
|
3381fa0ac4 | ||
|
|
aac01aaa50 | ||
|
|
a096858426 | ||
|
|
3d3dd05789 | ||
|
|
f9ae6f6ce7 | ||
|
|
e8fd01bea5 | ||
|
|
64b9102206 | ||
|
|
dcb12a992a | ||
|
|
25285ef6a7 | ||
|
|
5e5abf77da | ||
|
|
c2f4f06005 | ||
|
|
28bd7b6a4e | ||
|
|
c04049d6f6 | ||
|
|
ff79e437d2 | ||
|
|
c8b495f224 | ||
|
|
842c1a2274 | ||
|
|
50b145cf05 | ||
|
|
78a0d72a5c | ||
|
|
97ca0d81e7 | ||
|
|
02e8ee137f | ||
|
|
2643bbc228 | ||
|
|
c8d7e1346c | ||
|
|
7dedf173ad | ||
|
|
65593e36b1 | ||
|
|
8996e330b8 | ||
|
|
e85d434f4e | ||
|
|
82d9488ec8 | ||
|
|
cca50a8339 | ||
|
|
84373ce754 | ||
|
|
67546ce0b1 | ||
|
|
4fc302b67a | ||
|
|
ac33c22689 | ||
|
|
7aae490a85 | ||
|
|
50f9117982 | ||
|
|
99c6c60bec | ||
|
|
9548345ed0 | ||
|
|
d444ba397b | ||
|
|
62df3c00df | ||
|
|
831564784a | ||
|
|
17013c7c2c | ||
|
|
3ddd482cc1 | ||
|
|
bcf85a0df1 | ||
|
|
0a8b68fd4d | ||
|
|
08f12750f1 | ||
|
|
1798522ec8 | ||
|
|
d91e5a6b66 | ||
|
|
b57c60ad7a | ||
|
|
fa8ae0865e | ||
|
|
01b890f426 | ||
|
|
3480e6229a | ||
|
|
e6a2dde19a | ||
|
|
9d4b5ee58d | ||
|
|
369e6a3905 | ||
|
|
b77d060304 | ||
|
|
a147a189ca | ||
|
|
1e474bb5da | ||
|
|
d37d1ce4ad | ||
|
|
8ec75cf883 | ||
|
|
59f6fd7630 | ||
|
|
68edf10270 | ||
|
|
c6b63b15b8 | ||
|
|
f705a1e62e | ||
|
|
c884f9edbc | ||
|
|
d0af73efe1 | ||
|
|
5eb7268ae7 | ||
|
|
60c2e5e2e2 | ||
|
|
4e69b5b45f | ||
|
|
1d784bdc05 | ||
|
|
9181660497 | ||
|
|
f7aa1b026f | ||
|
|
c73fa6157d | ||
|
|
de43237f6d | ||
|
|
49abda2d49 | ||
|
|
1368501cba | ||
|
|
eae63cd231 | ||
|
|
b69663857b | ||
|
|
a9980c8be0 | ||
|
|
31dd6364c3 | ||
|
|
0478e7f41d | ||
|
|
f25f44a75b | ||
|
|
bbe45cbd4b | ||
|
|
69cc6affd5 | ||
|
|
8fdebf4f8f | ||
|
|
f2ae2c128d | ||
|
|
95abd91354 | ||
|
|
aaeca69bd5 | ||
|
|
7fc8ff982b | ||
|
|
155c75c54a | ||
|
|
afade4e997 | ||
|
|
53111f6426 | ||
|
|
53a701b12c | ||
|
|
a0e45cce79 | ||
|
|
2b62ea1f0e | ||
|
|
cc7b65a6c8 | ||
|
|
60fe4c9ae0 | ||
|
|
6173d7c8a0 | ||
|
|
d47905d119 | ||
|
|
f5a4af40ee | ||
|
|
e299d7b3d6 | ||
|
|
78a5dc71ac | ||
|
|
04b4284746 | ||
|
|
2be5e0dcf9 | ||
|
|
71ddebbf41 | ||
|
|
27d750db1c | ||
|
|
3b6b421152 | ||
|
|
b8ee3536b3 | ||
|
|
fcb1783f56 | ||
|
|
8937d44399 | ||
|
|
8041339052 | ||
|
|
f0fe865798 | ||
|
|
2eecb08b51 | ||
|
|
51a40c0441 | ||
|
|
177f5a35ae | ||
|
|
87d3680630 | ||
|
|
c6af8811fb | ||
|
|
bd7c0e87d5 | ||
|
|
df920b4eda | ||
|
|
cde3f670c2 | ||
|
|
c80683bb15 | ||
|
|
073327831f | ||
|
|
312fceeaf6 | ||
|
|
42d2f30ab8 | ||
|
|
4844477d3a | ||
|
|
ca8118138c | ||
|
|
e51b5e801e | ||
|
|
9ccb85d959 | ||
|
|
cea857e18a | ||
|
|
1afa136fc0 | ||
|
|
7d33b0a259 | ||
|
|
777e1ca832 | ||
|
|
2e26f0bd2b | ||
|
|
236debb455 | ||
|
|
5f5c541f2f | ||
|
|
f0f7dc4884 | ||
|
|
18d27c997d | ||
|
|
a44686389c | ||
|
|
98ba015f06 | ||
|
|
c1c2159dee | ||
|
|
a30c37017b | ||
|
|
195b034abc | ||
|
|
c5239c6176 | ||
|
|
5be695c49c | ||
|
|
8652c84745 | ||
|
|
36ed725ab4 | ||
|
|
cf5a35a421 | ||
|
|
8256d72f6d | ||
|
|
25745e9e27 | ||
|
|
3ce1049d21 | ||
|
|
f3e542542a | ||
|
|
07b635e7aa | ||
|
|
c2e843cbc3 | ||
|
|
7a5fca69af | ||
|
|
3016d3a186 | ||
|
|
a31e49c857 | ||
|
|
2fbbcafaed | ||
|
|
a2237ce5d4 | ||
|
|
af7f61fec2 | ||
|
|
26a66276cd | ||
|
|
9944e675a5 | ||
|
|
f9b9883aba | ||
|
|
1431fd6fbd | ||
|
|
b11171aaeb | ||
|
|
0b7a901c81 | ||
|
|
662e0dde80 | ||
|
|
ab832cda71 | ||
|
|
f90fe7e628 | ||
|
|
32685f16bf | ||
|
|
84cf76ba36 | ||
|
|
c2f1c4b981 | ||
|
|
31d7b702a6 | ||
|
|
df4caf41d0 | ||
|
|
0595fc3097 | ||
|
|
b0dc782c98 | ||
|
|
ecd7f86df0 | ||
|
|
b834671555 | ||
|
|
6e24b52a7e | ||
|
|
628e12c944 | ||
|
|
adbec5bffc | ||
|
|
e8a5306c23 | ||
|
|
b274b10f38 | ||
|
|
ac4f2c9f73 | ||
|
|
97ed7fbb3f | ||
|
|
003ca655ee | ||
|
|
412910ca65 | ||
|
|
31f569ada9 | ||
|
|
e75c9efb3f | ||
|
|
e93919673e | ||
|
|
c814b39fdb | ||
|
|
a491f97eb9 | ||
|
|
3c487928d4 | ||
|
|
e824c553ca | ||
|
|
2634f35b4e | ||
|
|
a1aaeab33a | ||
|
|
e9816f7e30 | ||
|
|
a9459c6d92 | ||
|
|
eec67d8b1a | ||
|
|
e8d9fe0aa8 | ||
|
|
aa03550f6b | ||
|
|
61c88db8a1 | ||
|
|
8dca73d08e | ||
|
|
3f4ce70414 | ||
|
|
945afbc6d4 | ||
|
|
c0a342d790 | ||
|
|
6de6c10bc3 | ||
|
|
6c25c9760a | ||
|
|
7bf140f921 | ||
|
|
e3d281b3c4 | ||
|
|
0c43c4b5e1 | ||
|
|
23dd644f4a | ||
|
|
7f90a1cab2 | ||
|
|
b6e0f538c5 | ||
|
|
8cd138608c | ||
|
|
846575b7fb | ||
|
|
3d2f843c1d | ||
|
|
5ba83d4dfb | ||
|
|
0dd19ed49c | ||
|
|
77b83b9e4d | ||
|
|
7db4eeaf7f | ||
|
|
7d651e2b7a | ||
|
|
40c424e793 | ||
|
|
a6ea5d43b4 | ||
|
|
bf70e91a0d | ||
|
|
5cf923ead6 | ||
|
|
fec2461e0e | ||
|
|
c71a5643ff | ||
|
|
b0387c4428 | ||
|
|
1e149a704b | ||
|
|
cb71b4a657 | ||
|
|
26cc41094d | ||
|
|
9946b19735 | ||
|
|
6ad9a97f0d | ||
|
|
a91ad0189e | ||
|
|
67b6657bcd | ||
|
|
e1a34c8030 | ||
|
|
b70f907d25 | ||
|
|
cde855f67d | ||
|
|
9cf43dd8ff | ||
|
|
03e6a92cf3 | ||
|
|
21c2e8da6e | ||
|
|
b3963e56ec | ||
|
|
c6e8e2398c | ||
|
|
4b5718431d | ||
|
|
333e1d6789 | ||
|
|
2e9c71f2c0 | ||
|
|
072879cc6e | ||
|
|
cc75adfed6 | ||
|
|
3cafc1f2c6 | ||
|
|
19a65f8db6 | ||
|
|
e8d1d28fdd | ||
|
|
9ad063ce03 | ||
|
|
f67693c56c | ||
|
|
9616fbdc36 | ||
|
|
48dd5af9e3 | ||
|
|
bc0fb5e3d9 | ||
|
|
8e2bbf8c82 | ||
|
|
538caafac2 | ||
|
|
0871d6c9c6 | ||
|
|
468b0e8934 | ||
|
|
6cbfc63311 | ||
|
|
2886b217ab | ||
|
|
fafc68673a | ||
|
|
1990df63aa | ||
|
|
e39f0f3e25 | ||
|
|
1f5e2fa3ce | ||
|
|
204dd77404 | ||
|
|
4e5b1ccde6 | ||
|
|
80844ae2ee | ||
|
|
a69a00785f | ||
|
|
41dd70f644 | ||
|
|
e5b8d5f7ea | ||
|
|
c49869160b | ||
|
|
69089da88e | ||
|
|
e43a733017 | ||
|
|
3eb6b9d297 | ||
|
|
ac5ab52d01 | ||
|
|
0d89b82bff | ||
|
|
0cde24e103 | ||
|
|
5598f05dee | ||
|
|
e932fc832c | ||
|
|
01b6830fd2 | ||
|
|
c1d0ac7b9d | ||
|
|
dce667fa07 | ||
|
|
a78361341e | ||
|
|
c87d6e4720 | ||
|
|
71346760d0 | ||
|
|
f6c1f336d4 | ||
|
|
638c958acd | ||
|
|
b2231945dc | ||
|
|
4dbfafa8ca | ||
|
|
56b8da133c | ||
|
|
06af6f19a3 | ||
|
|
5f37852695 | ||
|
|
5fe8a43e36 | ||
|
|
760b62e068 | ||
|
|
9205334235 | ||
|
|
ca4c6ffe8d | ||
|
|
b47b555c4f | ||
|
|
5d2f97de74 | ||
|
|
9e0636eefa | ||
|
|
6ae1228e61 | ||
|
|
29311e6391 | ||
|
|
bd4f66fda3 | ||
|
|
dc89499116 | ||
|
|
41b58b8bc1 | ||
|
|
58df05a7e7 | ||
|
|
fb940e4269 | ||
|
|
26fc57d1b3 | ||
|
|
da57f92796 | ||
|
|
236820d093 | ||
|
|
87712b9fa5 | ||
|
|
510d6d7874 | ||
|
|
8830054fad | ||
|
|
327fe63047 | ||
|
|
0f5c9b4af3 | ||
|
|
9813396880 | ||
|
|
f5f86993f1 | ||
|
|
d4fc22add4 | ||
|
|
d699a550c8 | ||
|
|
f71d4312e2 | ||
|
|
ec777a802c | ||
|
|
82cad58b8d | ||
|
|
34231383ec | ||
|
|
6e14e8ed91 | ||
|
|
4aedd3a09a | ||
|
|
26dea0f247 | ||
|
|
0792e72f71 | ||
|
|
d9420c1f73 | ||
|
|
ee1884423a | ||
|
|
17480a0398 | ||
|
|
75ec855822 | ||
|
|
2c5080e382 | ||
|
|
48e9742658 | ||
|
|
14b62120fd | ||
|
|
4a8149627e | ||
|
|
9c85ba5b66 | ||
|
|
fb0cb43261 | ||
|
|
23722dc291 | ||
|
|
e841f568c1 | ||
|
|
9b096322e1 | ||
|
|
df32a81165 | ||
|
|
8924d657a4 | ||
|
|
98ba529ead | ||
|
|
9a01cd84c2 | ||
|
|
09c6f57364 | ||
|
|
a807572382 | ||
|
|
dc6a44d0eb | ||
|
|
c296e9b9bb | ||
|
|
d22bb8fc7d | ||
|
|
b99275f6a5 | ||
|
|
57502bc911 | ||
|
|
128e66fa24 | ||
|
|
0132ac3c27 | ||
|
|
cfd8d70890 | ||
|
|
44d2871dc9 | ||
|
|
821e3beab0 | ||
|
|
a439e087e1 | ||
|
|
b8acbf3c3a | ||
|
|
d25214beb1 | ||
|
|
22d9bee41a | ||
|
|
a6eef22fbc | ||
|
|
f189367c02 | ||
|
|
40fa4463de | ||
|
|
729df112a7 | ||
|
|
9b52b9bf66 | ||
|
|
c6d5a5a6cc | ||
|
|
7f169e97ca | ||
|
|
560161bdbb | ||
|
|
1761a71338 | ||
|
|
3da3612c7b | ||
|
|
198432f222 | ||
|
|
a868685ac9 | ||
|
|
d4cab60343 | ||
|
|
da12ceae5b | ||
|
|
79b10612aa | ||
|
|
d5edbb424a | ||
|
|
d527e2c926 | ||
|
|
b899dd59c5 | ||
|
|
8f928982e0 | ||
|
|
8f243ad59d | ||
|
|
c9453bab19 | ||
|
|
78b7ed0ebe | ||
|
|
d468d0f71b | ||
|
|
6bc636c2f2 | ||
|
|
43a6be6471 | ||
|
|
d9f2a406f6 | ||
|
|
0bdbf007b2 | ||
|
|
f1cbb2a0b3 | ||
|
|
ecfbfb4527 | ||
|
|
d8690f426c | ||
|
|
39f2e49451 | ||
|
|
58f14c5fe2 | ||
|
|
319ac23736 | ||
|
|
86e50530b0 | ||
|
|
7881081207 | ||
|
|
8623294fcd | ||
|
|
76537a7f41 | ||
|
|
d85ae5dcae | ||
|
|
4e0683565d | ||
|
|
4d6c07f18a | ||
|
|
b0c68e0ea7 | ||
|
|
2858f56d4d | ||
|
|
c5d443a710 | ||
|
|
96af0cffc8 | ||
|
|
114af8e24b | ||
|
|
14752baf27 | ||
|
|
f2962a0d16 | ||
|
|
6ea92f86a5 | ||
|
|
55997c74b0 | ||
|
|
4e066f4681 | ||
|
|
dbc4f285f1 | ||
|
|
f5da0e341c | ||
|
|
21c96fa76c | ||
|
|
c1d441b0ac | ||
|
|
d63c44f778 | ||
|
|
03bb3d9ddc | ||
|
|
9413b5a415 | ||
|
|
08e2959742 | ||
|
|
6d9f1b3fd3 | ||
|
|
a89c8eeabe | ||
|
|
f382be4c15 | ||
|
|
ca70b96005 | ||
|
|
37602647aa | ||
|
|
d22c3f13b2 | ||
|
|
024ce0e8eb | ||
|
|
ee5540f351 | ||
|
|
e669e1e2bf | ||
|
|
227b8bdf8a | ||
|
|
76549beb96 | ||
|
|
2e848c3f1f | ||
|
|
266b3bc714 | ||
|
|
f3e4e8dce8 | ||
|
|
73008885c8 | ||
|
|
1460f7bd80 | ||
|
|
f722a6c08d | ||
|
|
cb5426c1fa | ||
|
|
7564d1fb52 | ||
|
|
a02b69db38 | ||
|
|
5ab1996d3f | ||
|
|
ffce593cc8 | ||
|
|
56155740fe | ||
|
|
0a13c47a8c | ||
|
|
d2022cae28 | ||
|
|
05bb645263 | ||
|
|
ddeb6b6baa | ||
|
|
08eca4a237 | ||
|
|
1e248551d5 | ||
|
|
c173a3be44 | ||
|
|
b782ed6bbb | ||
|
|
a0b1b2e254 | ||
|
|
c629f24f07 | ||
|
|
6b3c740dc3 | ||
|
|
616301f7ee | ||
|
|
e9b0f54a43 | ||
|
|
aa8ddeca34 | ||
|
|
fe8a330a45 | ||
|
|
f9b3ba2887 | ||
|
|
50d282ff37 | ||
|
|
970b00b8d6 | ||
|
|
92816b57ef | ||
|
|
9a8b945118 | ||
|
|
d8f5e9b878 | ||
|
|
b0e6f34976 | ||
|
|
9aeb75f28d | ||
|
|
8951c80225 | ||
|
|
6c5124e12a | ||
|
|
08591dae0e | ||
|
|
6d3c3ce449 | ||
|
|
3d03a86b13 | ||
|
|
7e2278f1cc | ||
|
|
416ff10ba9 | ||
|
|
aa91211229 | ||
|
|
cc1de3191f | ||
|
|
10c8f21f79 | ||
|
|
3da0f5e384 | ||
|
|
7260cada90 | ||
|
|
73d6dc6b6a | ||
|
|
4627d2c1fb | ||
|
|
f54ad26630 | ||
|
|
4c328e4959 | ||
|
|
1efccf2d90 | ||
|
|
6badd83c5d | ||
|
|
b817609adc | ||
|
|
61f4c73aca | ||
|
|
24e1a568a2 | ||
|
|
06ca04c1c8 | ||
|
|
5698173c76 | ||
|
|
d7fcb5268a | ||
|
|
d041c62f55 | ||
|
|
0eb387916f | ||
|
|
b87c541d3a | ||
|
|
a6a3555684 | ||
|
|
7559e70027 | ||
|
|
8fcfcc40fc | ||
|
|
c2218e8a64 | ||
|
|
0a7919a279 | ||
|
|
046a4fc401 | ||
|
|
70bbb867f9 | ||
|
|
ae5f284d10 | ||
|
|
6ea0575a4a | ||
|
|
d88d57f3bb | ||
|
|
bd80346592 | ||
|
|
7292f2be69 | ||
|
|
21d04b3e14 | ||
|
|
3c6235bee5 | ||
|
|
b0985bb459 | ||
|
|
8e93d0a7a2 | ||
|
|
820b381a8d | ||
|
|
236c5deeee | ||
|
|
935240f8c3 | ||
|
|
168f20bdf4 | ||
|
|
1810e459ee | ||
|
|
d86837cc4d | ||
|
|
af926db211 | ||
|
|
20ba80f934 | ||
|
|
34e3d2f997 | ||
|
|
fadfb89b4c | ||
|
|
84e6813779 | ||
|
|
4921d35e70 | ||
|
|
cebb146e7c | ||
|
|
4e6b133a17 | ||
|
|
0a5966c283 | ||
|
|
3f6a30a974 | ||
|
|
0db27f1cef | ||
|
|
628264be4e | ||
|
|
d286723087 | ||
|
|
fb3d66e6e1 | ||
|
|
795300848c | ||
|
|
b3b2e8ffb7 | ||
|
|
6a4bf1f817 | ||
|
|
7c27bab3c7 | ||
|
|
accfedce87 | ||
|
|
4cb0ff1f63 | ||
|
|
896eaba2d6 | ||
|
|
d648eb1e4f | ||
|
|
848a2a95a8 | ||
|
|
9235b52828 | ||
|
|
3fa84039f8 | ||
|
|
929f3c2594 | ||
|
|
95d460c8bd | ||
|
|
67e87f9048 | ||
|
|
9924dd7aca | ||
|
|
3ac8c6d1fe | ||
|
|
b179dbcdcf | ||
|
|
282b4f4927 | ||
|
|
b68a796c7c | ||
|
|
48276b041c | ||
|
|
bfafe9ccbe | ||
|
|
4cb1d77783 | ||
|
|
787bd75587 | ||
|
|
dc93779f02 | ||
|
|
14066dfb5a | ||
|
|
7d9988fd75 | ||
|
|
2fed016347 | ||
|
|
d1b82e9ede | ||
|
|
b8e20fcadf | ||
|
|
ebc09017b8 | ||
|
|
798b72e164 | ||
|
|
f77514c6f2 | ||
|
|
7887d6d6e4 | ||
|
|
0dc0706eb2 | ||
|
|
b30f4b8fc0 | ||
|
|
233bc1a108 | ||
|
|
61dabae6ab | ||
|
|
d858e1be05 | ||
|
|
4c3f39be02 | ||
|
|
b5ada3bf10 | ||
|
|
a3794b3241 | ||
|
|
952d72fdd3 | ||
|
|
5a9db70d24 | ||
|
|
17b59cd410 | ||
|
|
eb3e53e2d3 | ||
|
|
8af0747f95 | ||
|
|
ceac04b82d | ||
|
|
e93fbcf701 | ||
|
|
337cd40cb6 | ||
|
|
3664f61e2d | ||
|
|
1acd34313b | ||
|
|
888c5172bf | ||
|
|
3d802afecb | ||
|
|
1647ebaf31 | ||
|
|
ae1511d8f6 | ||
|
|
85f4cecc64 | ||
|
|
203c3a5175 | ||
|
|
846d31c4f1 | ||
|
|
cb460a85ba | ||
|
|
592d30d495 | ||
|
|
a79224aba8 | ||
|
|
7c5da67d74 | ||
|
|
b71baef7c8 | ||
|
|
2c341f2a65 | ||
|
|
1c1363875c | ||
|
|
156ab7dc2b | ||
|
|
4db0e7888a | ||
|
|
e98054accb | ||
|
|
7771ecfe58 | ||
|
|
6cd9667364 | ||
|
|
bf7e09ce59 | ||
|
|
32844bb318 | ||
|
|
1bca313421 | ||
|
|
984d41e334 | ||
|
|
fcfbdd2d89 | ||
|
|
4ec2af785a | ||
|
|
0eba920075 | ||
|
|
8f4bb8d445 | ||
|
|
3b8f254dfd | ||
|
|
64d6fa8e86 | ||
|
|
3b4a9a337b | ||
|
|
ae1bcd5fef | ||
|
|
9fb1f2fa17 | ||
|
|
d261c6ccc1 | ||
|
|
9ca5bdda7f | ||
|
|
6cc1bf37cc | ||
|
|
f5db7707bb | ||
|
|
859ae2fbad | ||
|
|
96a51d16a7 | ||
|
|
09292d5918 | ||
|
|
f62d473fc4 | ||
|
|
607b44f7c0 | ||
|
|
d78e132007 | ||
|
|
8d3c9bc2d0 | ||
|
|
6d4545cb3e | ||
|
|
c311e480fd | ||
|
|
4c6ddd435c | ||
|
|
d31140f8cd | ||
|
|
0ed9e185b2 | ||
|
|
408ae44bdd | ||
|
|
bf9c2c74fa | ||
|
|
ce93a332a7 | ||
|
|
bc15f11473 | ||
|
|
fccbd41203 | ||
|
|
17b3d3a8e4 | ||
|
|
279192d317 | ||
|
|
701d258076 | ||
|
|
034bbb4f5f | ||
|
|
2943ad15a5 | ||
|
|
13c3833593 | ||
|
|
d0715c75c0 | ||
|
|
eca424656a | ||
|
|
3b60081e2a | ||
|
|
fbfaa41cb0 | ||
|
|
df1da7554c | ||
|
|
6d280084fb | ||
|
|
1096fe3d87 | ||
|
|
389da16947 | ||
|
|
185af1b42a | ||
|
|
d17f27b65c | ||
|
|
bb0867f1a8 | ||
|
|
ac788a7ee7 | ||
|
|
bf52aa8ccc | ||
|
|
d7c8adc085 | ||
|
|
8b4ef3bbdd | ||
|
|
b67d32824c | ||
|
|
14c0ada9ac | ||
|
|
618039734a | ||
|
|
0d5e151c60 | ||
|
|
bad920fa87 | ||
|
|
281fe93a26 | ||
|
|
4a71593ffd | ||
|
|
014cc14b7e | ||
|
|
ee71d2ca60 | ||
|
|
5085ce8ab1 | ||
|
|
9ed5b70d01 | ||
|
|
976bf3e979 | ||
|
|
0b70419859 | ||
|
|
6f903db8c4 | ||
|
|
4c88578371 | ||
|
|
b1dcfaf6b3 | ||
|
|
449a7d3fd5 | ||
|
|
7fd2e67d11 | ||
|
|
34260ed09f | ||
|
|
a00d8a493d | ||
|
|
263c0322ee | ||
|
|
2b0e56932b | ||
|
|
e12cef8d77 | ||
|
|
704cdac874 | ||
|
|
89d7c0af91 | ||
|
|
5f3bcedbba | ||
|
|
d2d3f27f85 | ||
|
|
6795db9bd6 | ||
|
|
6a693546a3 | ||
|
|
a8c73ffb93 | ||
|
|
411e36b0f8 | ||
|
|
fbfc674ca5 | ||
|
|
ca20b0cf17 | ||
|
|
05454b76a6 | ||
|
|
b4c858bcdf | ||
|
|
4d4fd19f87 | ||
|
|
16a846b1e7 | ||
|
|
034b0e07d2 | ||
|
|
c486f794f9 | ||
|
|
9220270948 | ||
|
|
22f68d70a7 | ||
|
|
bf85e18d45 | ||
|
|
09c43e8854 | ||
|
|
7be7d3ffac | ||
|
|
2823ef84db | ||
|
|
4d07448cf8 | ||
|
|
12d59797a7 | ||
|
|
673290d2e1 | ||
|
|
5a81ddd4e7 | ||
|
|
ef820c3126 | ||
|
|
278b9d0f71 | ||
|
|
276ab191b5 | ||
|
|
e5cbf01ce1 | ||
|
|
fe2e5089ab | ||
|
|
35ffac1e01 | ||
|
|
362f23a950 | ||
|
|
dc8d4ac8e4 | ||
|
|
0cdea28e2a | ||
|
|
7d1a02feb1 | ||
|
|
b90636f640 | ||
|
|
b4374c8c4c | ||
|
|
3076866ec6 | ||
|
|
e6a54013dc | ||
|
|
3edc58a04e | ||
|
|
70fe4f22db | ||
|
|
9f1dc71320 | ||
|
|
f43eca248a | ||
|
|
958b894020 | ||
|
|
0ba2b4e253 | ||
|
|
1e6b91b05a | ||
|
|
5c8f209aa7 | ||
|
|
d966e0cfce | ||
|
|
3eeccc1a65 | ||
|
|
52e33c2aa2 | ||
|
|
35f5784287 | ||
|
|
46cc6e199b | ||
|
|
6371eca14d | ||
|
|
052641e620 | ||
|
|
16edcd9938 | ||
|
|
4fa6f2e54f | ||
|
|
3c1cdecb88 | ||
|
|
18286dbf4b | ||
|
|
3a0616c680 | ||
|
|
440e4289e4 | ||
|
|
8fe1a84db2 | ||
|
|
5fa66ba4a3 | ||
|
|
261f3bcba6 | ||
|
|
9be1b72ed7 | ||
|
|
5610541515 | ||
|
|
bfc8d2457c | ||
|
|
dedc2ef918 | ||
|
|
a9c85b9944 | ||
|
|
bf91a8c1b3 | ||
|
|
6f299e7245 | ||
|
|
1ad495070d | ||
|
|
84719d944a | ||
|
|
4ca588deae | ||
|
|
325001933d | ||
|
|
acc9fd0382 | ||
|
|
f32d1c0dea | ||
|
|
ca89d6184c | ||
|
|
2bfe7aa219 | ||
|
|
6fcd56c462 | ||
|
|
1ce2d97d3d | ||
|
|
04c5cda7e5 | ||
|
|
7692cffdbe | ||
|
|
7c093bd928 | ||
|
|
bcee3f9570 | ||
|
|
78ffb6f3e6 | ||
|
|
d1aa4f42e5 | ||
|
|
e7d34913c0 | ||
|
|
1a3a38d370 | ||
|
|
59ce31f44f | ||
|
|
b3d8f8620c | ||
|
|
3eebb9d51d | ||
|
|
b6bb6919e6 | ||
|
|
c08862679d | ||
|
|
50db622689 | ||
|
|
9303a56d8f | ||
|
|
6667138b73 | ||
|
|
d9852bc75d | ||
|
|
6aeccf0330 | ||
|
|
f8572c1d71 | ||
|
|
e2e001d042 | ||
|
|
e3307213b1 | ||
|
|
84baaa324c | ||
|
|
42ee8eef50 | ||
|
|
3fef9a93cf | ||
|
|
4b256f3466 | ||
|
|
bebfc3d16e | ||
|
|
fd3902f7e7 | ||
|
|
dfb992adb2 | ||
|
|
a252065f99 | ||
|
|
6947f8cb2e | ||
|
|
85dfea1642 | ||
|
|
015c8811a5 | ||
|
|
d9c78b77cb | ||
|
|
1b543cf538 | ||
|
|
9fb8144031 | ||
|
|
f2033c418f | ||
|
|
aa266cb630 | ||
|
|
9a5d783537 | ||
|
|
5800b57791 | ||
|
|
c840771c0a | ||
|
|
9678752480 | ||
|
|
31b2f331db | ||
|
|
0ba54ee9b7 | ||
|
|
5c86a51b45 | ||
|
|
9debbfb1a8 | ||
|
|
97b671171b | ||
|
|
179fb0f3b5 | ||
|
|
96b7bb625d | ||
|
|
afeb13d980 | ||
|
|
6e1728542e | ||
|
|
9438dd1cbd | ||
|
|
0194905e97 | ||
|
|
25505dc1d4 | ||
|
|
ce219ac6c7 | ||
|
|
fa20957e01 | ||
|
|
7959c04d1e | ||
|
|
e6d7f6ed71 | ||
|
|
144b530045 | ||
|
|
39ba99005a | ||
|
|
f867b025e5 | ||
|
|
c928f82cbf | ||
|
|
9d7aa8f05d | ||
|
|
02f927ae2d | ||
|
|
1d022522cd | ||
|
|
bad9ac5395 | ||
|
|
e9f561e7ab | ||
|
|
14d169558f | ||
|
|
ca2a68217d | ||
|
|
0a9a8ecc4e | ||
|
|
6cef850497 | ||
|
|
66af4bd011 | ||
|
|
03253f4598 | ||
|
|
aa5d8e5a81 | ||
|
|
206029eadc | ||
|
|
958c5ecbfe | ||
|
|
3d79bf2bfe | ||
|
|
1de0a0bbb9 | ||
|
|
8d22479d24 | ||
|
|
7f7435f003 | ||
|
|
d2eb5bb0f3 | ||
|
|
085303c349 | ||
|
|
9ac6f906ff | ||
|
|
f995ab9d54 | ||
|
|
77f595c9a4 | ||
|
|
8d0b1588be | ||
|
|
70c5c82541 | ||
|
|
bf910ef383 | ||
|
|
99c49c0993 | ||
|
|
f6e6c21ba6 | ||
|
|
41b7f5ab1c | ||
|
|
c5bd6b3d6b | ||
|
|
9e96397e6a | ||
|
|
f207e01510 | ||
|
|
6b3bb3347b | ||
|
|
806903ffe0 | ||
|
|
fdf1fa48e3 | ||
|
|
636077c74d | ||
|
|
e047e4dcff | ||
|
|
eae306c3f1 | ||
|
|
fbd7c72283 | ||
|
|
fc58746bc3 | ||
|
|
9ae878d8f2 | ||
|
|
afe9fc221e | ||
|
|
eb912be47a | ||
|
|
5c346e8fb6 | ||
|
|
8d388c5e79 | ||
|
|
314574fc84 | ||
|
|
e356d0bcda | ||
|
|
f991ec15f2 | ||
|
|
d7d83c683d | ||
|
|
ff867a7d57 | ||
|
|
1282370ccb | ||
|
|
eebd094423 | ||
|
|
57bd4185d4 | ||
|
|
91ba35c68e | ||
|
|
a99e15343c | ||
|
|
4583638b92 | ||
|
|
10a1b156e3 | ||
|
|
a8286535eb | ||
|
|
05146badf1 | ||
|
|
c483e4479e | ||
|
|
4a70c725b4 | ||
|
|
33ed017851 | ||
|
|
fffc4dd3fd | ||
|
|
e072981295 | ||
|
|
5d983d0b61 | ||
|
|
727f667cbc | ||
|
|
1b4fc2ae8d | ||
|
|
5b0d1415ad | ||
|
|
7818c98c67 | ||
|
|
e12222697c | ||
|
|
58f28f177d | ||
|
|
6030e419c5 | ||
|
|
8d2a784831 | ||
|
|
5dc841ecae | ||
|
|
edf34eea94 | ||
|
|
a303f67d3b | ||
|
|
1b5f526e09 | ||
|
|
03a0a3572b | ||
|
|
297d24c5b0 | ||
|
|
c8cf06b8b7 | ||
|
|
49d6d7c656 | ||
|
|
96fd874090 | ||
|
|
c9703872e2 | ||
|
|
2f5d7d4522 | ||
|
|
7716e8fb68 | ||
|
|
c2fc8a0d61 | ||
|
|
9be384690a | ||
|
|
692eeb3687 | ||
|
|
213c91ae73 | ||
|
|
36b1a89f93 | ||
|
|
584bfbaa76 | ||
|
|
0f140751b2 | ||
|
|
6b359c95da | ||
|
|
1fec64a1b3 | ||
|
|
70ed58a78d | ||
|
|
6aa9844f8f | ||
|
|
7a4238095d | ||
|
|
177594f02c | ||
|
|
cf89f45697 | ||
|
|
2dc78e6f0c | ||
|
|
9da74dda43 | ||
|
|
18149dcb8c | ||
|
|
3f841a36a5 | ||
|
|
80ae02cc49 | ||
|
|
421b2962c6 | ||
|
|
bde5a9ef01 | ||
|
|
b79886ad85 | ||
|
|
94a2fd542e | ||
|
|
6fa8556033 | ||
|
|
19cfa8cf22 | ||
|
|
a859997190 | ||
|
|
6f9860b25e | ||
|
|
128ce589e1 | ||
|
|
9b21774392 | ||
|
|
eaf4a75402 | ||
|
|
a1a6d4a631 | ||
|
|
de1fd5a7fa | ||
|
|
0d96095646 | ||
|
|
45085dd97f | ||
|
|
b2a1204bc5 | ||
|
|
990a9e80a2 | ||
|
|
0ffcc197d4 | ||
|
|
1a051f038d | ||
|
|
1e22c8daca | ||
|
|
b8cbd39985 | ||
|
|
3508622e3b | ||
|
|
b8f6d824fd | ||
|
|
e687848152 | ||
|
|
2a9fd9ae26 | ||
|
|
3ec4070d8c | ||
|
|
6f8038992c | ||
|
|
5c9a58f3e6 | ||
|
|
d34214ad32 | ||
|
|
2b7021407c | ||
|
|
03cd4480df | ||
|
|
910825580e | ||
|
|
c8d479e594 | ||
|
|
34f6245e74 | ||
|
|
e9ea5c2ccb | ||
|
|
4347a0f6b7 | ||
|
|
5888e32360 | ||
|
|
4214a354a7 | ||
|
|
369afd7ddd | ||
|
|
281445917b | ||
|
|
df6846344d | ||
|
|
05960fa29c | ||
|
|
068749bcbe | ||
|
|
f21f32778f | ||
|
|
8ef3c6d4d3 | ||
|
|
c7a78ed522 | ||
|
|
45adb5c9c7 | ||
|
|
4004867eda | ||
|
|
118d3bc11c | ||
|
|
0e9d71f232 | ||
|
|
b552fbe312 |
@@ -1,272 +0,0 @@
|
||||
# Python CircleCI 2.0 configuration file
|
||||
#
|
||||
# Check https://circleci.com/docs/2.0/language-python/ for more details
|
||||
#
|
||||
version: 2.1
|
||||
|
||||
executors:
|
||||
|
||||
python:
|
||||
parameters:
|
||||
tag:
|
||||
type: string
|
||||
default: latest
|
||||
docker:
|
||||
- image: circleci/python:<< parameters.tag >>
|
||||
- image: circleci/buildpack-deps:stretch
|
||||
working_directory: ~/repo
|
||||
|
||||
commands:
|
||||
|
||||
docker-prereqs:
|
||||
description: Set up docker prerequisite requirement
|
||||
steps:
|
||||
- run: sudo apt-get update && sudo apt-get install -y --no-install-recommends
|
||||
libudev-dev libavformat-dev libavcodec-dev libavdevice-dev libavutil-dev
|
||||
libswscale-dev libswresample-dev libavfilter-dev
|
||||
|
||||
install-requirements:
|
||||
description: Set up venv and install requirements python packages with cache support
|
||||
parameters:
|
||||
python:
|
||||
type: string
|
||||
default: latest
|
||||
all:
|
||||
description: pip install -r requirements_all.txt
|
||||
type: boolean
|
||||
default: false
|
||||
test:
|
||||
description: pip install -r requirements_test.txt
|
||||
type: boolean
|
||||
default: false
|
||||
test_all:
|
||||
description: pip install -r requirements_test_all.txt
|
||||
type: boolean
|
||||
default: false
|
||||
steps:
|
||||
- restore_cache:
|
||||
keys:
|
||||
- v1-<< parameters.python >>-{{ checksum "homeassistant/package_constraints.txt" }}-<<# parameters.all >>{{ checksum "requirements_all.txt" }}<</ parameters.all>>-<<# parameters.test >>{{ checksum "requirements_test.txt" }}<</ parameters.test>>-<<# parameters.test_all >>{{ checksum "requirements_test_all.txt" }}<</ parameters.test_all>>
|
||||
- run:
|
||||
name: install dependencies
|
||||
command: |
|
||||
python3 -m venv venv
|
||||
. venv/bin/activate
|
||||
pip install -q -U pip
|
||||
pip install -q -U setuptools
|
||||
<<# parameters.all >>pip install -q --progress-bar off -r requirements_all.txt -c homeassistant/package_constraints.txt<</ parameters.all>>
|
||||
<<# parameters.test >>pip install -q --progress-bar off -r requirements_test.txt -c homeassistant/package_constraints.txt<</ parameters.test>>
|
||||
<<# parameters.test_all >>pip install -q --progress-bar off -r requirements_test_all.txt -c homeassistant/package_constraints.txt<</ parameters.test_all>>
|
||||
no_output_timeout: 15m
|
||||
- save_cache:
|
||||
paths:
|
||||
- ./venv
|
||||
key: v1-<< parameters.python >>-{{ checksum "homeassistant/package_constraints.txt" }}-<<# parameters.all >>{{ checksum "requirements_all.txt" }}<</ parameters.all>>-<<# parameters.test >>{{ checksum "requirements_test.txt" }}<</ parameters.test>>-<<# parameters.test_all >>{{ checksum "requirements_test_all.txt" }}<</ parameters.test_all>>
|
||||
|
||||
install:
|
||||
description: Install Home Assistant
|
||||
steps:
|
||||
- run:
|
||||
name: install
|
||||
command: |
|
||||
. venv/bin/activate
|
||||
pip install -q --progress-bar off -e .
|
||||
|
||||
jobs:
|
||||
|
||||
static-check:
|
||||
executor:
|
||||
name: python
|
||||
tag: 3.5.5-stretch
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
- docker-prereqs
|
||||
- install-requirements:
|
||||
python: 3.5.5-stretch
|
||||
test: true
|
||||
|
||||
- run:
|
||||
name: run static check
|
||||
command: |
|
||||
. venv/bin/activate
|
||||
flake8 homeassistant tests script
|
||||
|
||||
- run:
|
||||
name: run static type check
|
||||
command: |
|
||||
. venv/bin/activate
|
||||
TYPING_FILES=$(cat mypyrc)
|
||||
mypy $TYPING_FILES
|
||||
|
||||
- install
|
||||
|
||||
- run:
|
||||
name: validate manifests
|
||||
command: |
|
||||
. venv/bin/activate
|
||||
python -m script.hassfest validate
|
||||
|
||||
- run:
|
||||
name: run gen_requirements_all
|
||||
command: |
|
||||
. venv/bin/activate
|
||||
python script/gen_requirements_all.py validate
|
||||
|
||||
pre-install-all-requirements:
|
||||
executor:
|
||||
name: python
|
||||
tag: 3.5.5-stretch
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
- docker-prereqs
|
||||
- install-requirements:
|
||||
python: 3.5.5-stretch
|
||||
all: true
|
||||
test: true
|
||||
|
||||
pylint:
|
||||
executor:
|
||||
name: python
|
||||
tag: 3.5.5-stretch
|
||||
parallelism: 2
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
- docker-prereqs
|
||||
- install-requirements:
|
||||
python: 3.5.5-stretch
|
||||
all: true
|
||||
test: true
|
||||
- install
|
||||
|
||||
- run:
|
||||
name: run pylint
|
||||
command: |
|
||||
. venv/bin/activate
|
||||
PYFILES=$(circleci tests glob "homeassistant/**/*.py" | circleci tests split)
|
||||
pylint ${PYFILES}
|
||||
no_output_timeout: 15m
|
||||
|
||||
pre-test:
|
||||
parameters:
|
||||
python:
|
||||
type: string
|
||||
executor:
|
||||
name: python
|
||||
tag: << parameters.python >>
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
- docker-prereqs
|
||||
- install-requirements:
|
||||
python: << parameters.python >>
|
||||
test_all: true
|
||||
|
||||
test:
|
||||
parameters:
|
||||
python:
|
||||
type: string
|
||||
executor:
|
||||
name: python
|
||||
tag: << parameters.python >>
|
||||
parallelism: 2
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
- docker-prereqs
|
||||
- install-requirements:
|
||||
python: << parameters.python >>
|
||||
test_all: true
|
||||
- install
|
||||
|
||||
- run:
|
||||
name: run tests with code coverage
|
||||
command: |
|
||||
. venv/bin/activate
|
||||
CC_SWITCH="--cov --cov-report="
|
||||
TESTFILES=$(circleci tests glob "tests/**/test_*.py" | circleci tests split --split-by=timings)
|
||||
pytest --timeout=9 --durations=10 --junitxml=test-reports/homeassistant/results.xml -qq -o junit_family=xunit2 -o junit_suite_name=homeassistant -o console_output_style=count -p no:sugar $CC_SWITCH -- ${TESTFILES}
|
||||
script/check_dirty
|
||||
codecov
|
||||
|
||||
- store_test_results:
|
||||
path: test-reports
|
||||
|
||||
- store_artifacts:
|
||||
path: htmlcov
|
||||
destination: cov-reports
|
||||
|
||||
- store_artifacts:
|
||||
path: test-reports
|
||||
destination: test-reports
|
||||
|
||||
# This job use machine executor, e.g. classic CircleCI VM because we need both lokalise-cli and a Python runtime.
|
||||
# Classic CircleCI included python 2.7.12 and python 3.5.2 managed by pyenv, the Python version may need change if
|
||||
# CircleCI changed its VM in future.
|
||||
upload-translations:
|
||||
machine: true
|
||||
|
||||
steps:
|
||||
- checkout
|
||||
|
||||
- run:
|
||||
name: upload english translations
|
||||
command: |
|
||||
pyenv versions
|
||||
pyenv global 3.5.2
|
||||
docker pull lokalise/lokalise-cli@sha256:2198814ebddfda56ee041a4b427521757dd57f75415ea9693696a64c550cef21
|
||||
script/translations_upload
|
||||
|
||||
workflows:
|
||||
version: 2
|
||||
build:
|
||||
jobs:
|
||||
- static-check
|
||||
- pre-install-all-requirements:
|
||||
requires:
|
||||
- static-check
|
||||
- pylint:
|
||||
requires:
|
||||
- pre-install-all-requirements
|
||||
- pre-test:
|
||||
name: pre-test 3.5.5
|
||||
requires:
|
||||
- static-check
|
||||
python: 3.5.5-stretch
|
||||
- pre-test:
|
||||
name: pre-test 3.6
|
||||
requires:
|
||||
- static-check
|
||||
python: 3.6-stretch
|
||||
- pre-test:
|
||||
name: pre-test 3.7
|
||||
requires:
|
||||
- static-check
|
||||
python: 3.7-stretch
|
||||
- test:
|
||||
name: test 3.5.5
|
||||
requires:
|
||||
- pre-test 3.5.5
|
||||
python: 3.5.5-stretch
|
||||
- test:
|
||||
name: test 3.6
|
||||
requires:
|
||||
- pre-test 3.6
|
||||
python: 3.6-stretch
|
||||
- test:
|
||||
name: test 3.7
|
||||
requires:
|
||||
- pre-test 3.7
|
||||
python: 3.7-stretch
|
||||
# CircleCI does not allow failure yet
|
||||
# - test:
|
||||
# name: test 3.8
|
||||
# python: 3.8-rc-stretch
|
||||
- upload-translations:
|
||||
requires:
|
||||
- static-check
|
||||
filters:
|
||||
branches:
|
||||
only: dev
|
||||
61
.coveragerc
61
.coveragerc
@@ -13,6 +13,10 @@ omit =
|
||||
homeassistant/components/abode/*
|
||||
homeassistant/components/acer_projector/switch.py
|
||||
homeassistant/components/actiontec/device_tracker.py
|
||||
homeassistant/components/adguard/__init__.py
|
||||
homeassistant/components/adguard/const.py
|
||||
homeassistant/components/adguard/sensor.py
|
||||
homeassistant/components/adguard/switch.py
|
||||
homeassistant/components/ads/*
|
||||
homeassistant/components/aftership/sensor.py
|
||||
homeassistant/components/airvisual/sensor.py
|
||||
@@ -30,10 +34,13 @@ omit =
|
||||
homeassistant/components/androidtv/*
|
||||
homeassistant/components/anel_pwrctrl/switch.py
|
||||
homeassistant/components/anthemav/media_player.py
|
||||
homeassistant/components/apache_kafka/*
|
||||
homeassistant/components/apcupsd/*
|
||||
homeassistant/components/apple_tv/*
|
||||
homeassistant/components/aqualogic/*
|
||||
homeassistant/components/aquostv/media_player.py
|
||||
homeassistant/components/arcam_fmj/media_player.py
|
||||
homeassistant/components/arcam_fmj/__init__.py
|
||||
homeassistant/components/arduino/*
|
||||
homeassistant/components/arest/binary_sensor.py
|
||||
homeassistant/components/arest/sensor.py
|
||||
@@ -45,8 +52,11 @@ omit =
|
||||
homeassistant/components/asterisk_mbox/*
|
||||
homeassistant/components/asuswrt/device_tracker.py
|
||||
homeassistant/components/august/*
|
||||
homeassistant/components/aurora_abb_powerone/sensor.py
|
||||
homeassistant/components/automatic/device_tracker.py
|
||||
homeassistant/components/avea/light.py
|
||||
homeassistant/components/avion/light.py
|
||||
homeassistant/components/azure_event_hub/*
|
||||
homeassistant/components/baidu/tts.py
|
||||
homeassistant/components/bbb_gpio/*
|
||||
homeassistant/components/bbox/device_tracker.py
|
||||
@@ -113,6 +123,7 @@ omit =
|
||||
homeassistant/components/ddwrt/device_tracker.py
|
||||
homeassistant/components/decora/light.py
|
||||
homeassistant/components/decora_wifi/light.py
|
||||
homeassistant/components/delijn/*
|
||||
homeassistant/components/deluge/sensor.py
|
||||
homeassistant/components/deluge/switch.py
|
||||
homeassistant/components/denon/media_player.py
|
||||
@@ -152,6 +163,7 @@ omit =
|
||||
homeassistant/components/eight_sleep/*
|
||||
homeassistant/components/eliqonline/sensor.py
|
||||
homeassistant/components/elkm1/*
|
||||
homeassistant/components/elv/switch.py
|
||||
homeassistant/components/emby/media_player.py
|
||||
homeassistant/components/emoncms/sensor.py
|
||||
homeassistant/components/emoncms_history/*
|
||||
@@ -160,6 +172,7 @@ omit =
|
||||
homeassistant/components/enocean/*
|
||||
homeassistant/components/enphase_envoy/sensor.py
|
||||
homeassistant/components/entur_public_transport/*
|
||||
homeassistant/components/environment_canada/*
|
||||
homeassistant/components/envirophat/sensor.py
|
||||
homeassistant/components/envisalink/*
|
||||
homeassistant/components/ephember/climate.py
|
||||
@@ -171,6 +184,7 @@ omit =
|
||||
homeassistant/components/esphome/camera.py
|
||||
homeassistant/components/esphome/climate.py
|
||||
homeassistant/components/esphome/cover.py
|
||||
homeassistant/components/esphome/entry_data.py
|
||||
homeassistant/components/esphome/fan.py
|
||||
homeassistant/components/esphome/light.py
|
||||
homeassistant/components/esphome/sensor.py
|
||||
@@ -189,6 +203,7 @@ omit =
|
||||
homeassistant/components/fints/sensor.py
|
||||
homeassistant/components/fitbit/sensor.py
|
||||
homeassistant/components/fixer/sensor.py
|
||||
homeassistant/components/fleetgo/device_tracker.py
|
||||
homeassistant/components/flexit/climate.py
|
||||
homeassistant/components/flic/binary_sensor.py
|
||||
homeassistant/components/flock/notify.py
|
||||
@@ -197,6 +212,8 @@ omit =
|
||||
homeassistant/components/folder/sensor.py
|
||||
homeassistant/components/folder_watcher/*
|
||||
homeassistant/components/foobot/sensor.py
|
||||
homeassistant/components/fortios/device_tracker.py
|
||||
homeassistant/components/fortigate/*
|
||||
homeassistant/components/foscam/camera.py
|
||||
homeassistant/components/foursquare/*
|
||||
homeassistant/components/free_mobile/notify.py
|
||||
@@ -206,6 +223,7 @@ omit =
|
||||
homeassistant/components/fritzbox_callmonitor/sensor.py
|
||||
homeassistant/components/fritzbox_netmonitor/sensor.py
|
||||
homeassistant/components/fritzdect/switch.py
|
||||
homeassistant/components/fronius/sensor.py
|
||||
homeassistant/components/frontier_silicon/media_player.py
|
||||
homeassistant/components/futurenow/light.py
|
||||
homeassistant/components/garadget/cover.py
|
||||
@@ -221,6 +239,7 @@ omit =
|
||||
homeassistant/components/goalfeed/*
|
||||
homeassistant/components/gogogate2/cover.py
|
||||
homeassistant/components/google/*
|
||||
homeassistant/components/google_cloud/tts.py
|
||||
homeassistant/components/google_maps/device_tracker.py
|
||||
homeassistant/components/google_travel_time/sensor.py
|
||||
homeassistant/components/googlehome/*
|
||||
@@ -250,7 +269,6 @@ omit =
|
||||
homeassistant/components/hitron_coda/device_tracker.py
|
||||
homeassistant/components/hive/*
|
||||
homeassistant/components/hlk_sw16/*
|
||||
homeassistant/components/homekit_controller/*
|
||||
homeassistant/components/homematic/*
|
||||
homeassistant/components/homematic/climate.py
|
||||
homeassistant/components/homematic/cover.py
|
||||
@@ -311,6 +329,7 @@ omit =
|
||||
homeassistant/components/lcn/*
|
||||
homeassistant/components/lg_netcast/media_player.py
|
||||
homeassistant/components/lg_soundbar/media_player.py
|
||||
homeassistant/components/life360/*
|
||||
homeassistant/components/lifx/*
|
||||
homeassistant/components/lifx_cloud/scene.py
|
||||
homeassistant/components/lifx_legacy/light.py
|
||||
@@ -344,6 +363,7 @@ omit =
|
||||
homeassistant/components/mastodon/notify.py
|
||||
homeassistant/components/matrix/*
|
||||
homeassistant/components/maxcube/*
|
||||
homeassistant/components/mcp23017/*
|
||||
homeassistant/components/media_extractor/*
|
||||
homeassistant/components/mediaroom/media_player.py
|
||||
homeassistant/components/message_bird/notify.py
|
||||
@@ -395,6 +415,8 @@ omit =
|
||||
homeassistant/components/nissan_leaf/*
|
||||
homeassistant/components/nmap_tracker/device_tracker.py
|
||||
homeassistant/components/nmbs/sensor.py
|
||||
homeassistant/components/notion/binary_sensor.py
|
||||
homeassistant/components/notion/sensor.py
|
||||
homeassistant/components/noaa_tides/sensor.py
|
||||
homeassistant/components/norway_air/air_quality.py
|
||||
homeassistant/components/nsw_fuel_station/sensor.py
|
||||
@@ -442,6 +464,7 @@ omit =
|
||||
homeassistant/components/ping/device_tracker.py
|
||||
homeassistant/components/pioneer/media_player.py
|
||||
homeassistant/components/pjlink/media_player.py
|
||||
homeassistant/components/plaato/*
|
||||
homeassistant/components/plex/media_player.py
|
||||
homeassistant/components/plex/sensor.py
|
||||
homeassistant/components/plum_lightpad/*
|
||||
@@ -453,8 +476,6 @@ omit =
|
||||
homeassistant/components/prometheus/*
|
||||
homeassistant/components/prowl/notify.py
|
||||
homeassistant/components/proxy/camera.py
|
||||
homeassistant/components/ps4/__init__.py
|
||||
homeassistant/components/ps4/media_player.py
|
||||
homeassistant/components/ptvsd/*
|
||||
homeassistant/components/pulseaudio_loopback/switch.py
|
||||
homeassistant/components/pushbullet/notify.py
|
||||
@@ -480,6 +501,7 @@ omit =
|
||||
homeassistant/components/rainmachine/binary_sensor.py
|
||||
homeassistant/components/rainmachine/sensor.py
|
||||
homeassistant/components/rainmachine/switch.py
|
||||
homeassistant/components/rainforest_eagle/sensor.py
|
||||
homeassistant/components/raspihats/*
|
||||
homeassistant/components/raspyrfm/*
|
||||
homeassistant/components/recollect_waste/sensor.py
|
||||
@@ -487,13 +509,15 @@ omit =
|
||||
homeassistant/components/reddit/*
|
||||
homeassistant/components/rejseplanen/sensor.py
|
||||
homeassistant/components/remember_the_milk/__init__.py
|
||||
homeassistant/components/repetier/__init__.py
|
||||
homeassistant/components/repetier/sensor.py
|
||||
homeassistant/components/remote_rpi_gpio/*
|
||||
homeassistant/components/rest/binary_sensor.py
|
||||
homeassistant/components/rest/notify.py
|
||||
homeassistant/components/rest/switch.py
|
||||
homeassistant/components/rfxtrx/*
|
||||
homeassistant/components/ring/camera.py
|
||||
homeassistant/components/ripple/sensor.py
|
||||
homeassistant/components/ritassist/device_tracker.py
|
||||
homeassistant/components/rocketchat/notify.py
|
||||
homeassistant/components/roku/*
|
||||
homeassistant/components/roomba/vacuum.py
|
||||
@@ -539,12 +563,17 @@ omit =
|
||||
homeassistant/components/slack/notify.py
|
||||
homeassistant/components/sma/sensor.py
|
||||
homeassistant/components/smappee/*
|
||||
homeassistant/components/smarty/*
|
||||
homeassistant/components/smarthab/*
|
||||
homeassistant/components/smtp/notify.py
|
||||
homeassistant/components/snapcast/media_player.py
|
||||
homeassistant/components/snmp/*
|
||||
homeassistant/components/sochain/sensor.py
|
||||
homeassistant/components/socialblade/sensor.py
|
||||
homeassistant/components/solaredge/sensor.py
|
||||
homeassistant/components/solaredge_local/sensor.py
|
||||
homeassistant/components/solax/sensor.py
|
||||
homeassistant/components/somfy/*
|
||||
homeassistant/components/somfy_mylink/*
|
||||
homeassistant/components/sonarr/sensor.py
|
||||
homeassistant/components/songpal/media_player.py
|
||||
@@ -560,7 +589,9 @@ omit =
|
||||
homeassistant/components/starlingbank/sensor.py
|
||||
homeassistant/components/steam_online/sensor.py
|
||||
homeassistant/components/stiebel_eltron/*
|
||||
homeassistant/components/streamlabswater/*
|
||||
homeassistant/components/stride/notify.py
|
||||
homeassistant/components/suez_water/*
|
||||
homeassistant/components/supervisord/sensor.py
|
||||
homeassistant/components/swiss_hydrological_data/sensor.py
|
||||
homeassistant/components/swiss_public_transport/sensor.py
|
||||
@@ -606,20 +637,24 @@ omit =
|
||||
homeassistant/components/tomato/device_tracker.py
|
||||
homeassistant/components/toon/*
|
||||
homeassistant/components/torque/sensor.py
|
||||
homeassistant/components/totalconnect/alarm_control_panel.py
|
||||
homeassistant/components/totalconnect/*
|
||||
homeassistant/components/touchline/climate.py
|
||||
homeassistant/components/tplink/device_tracker.py
|
||||
homeassistant/components/tplink/light.py
|
||||
homeassistant/components/tplink/switch.py
|
||||
homeassistant/components/tplink_lte/*
|
||||
homeassistant/components/traccar/device_tracker.py
|
||||
homeassistant/components/traccar/const.py
|
||||
homeassistant/components/trackr/device_tracker.py
|
||||
homeassistant/components/tradfri/*
|
||||
homeassistant/components/tradfri/light.py
|
||||
homeassistant/components/trafikverket_train/sensor.py
|
||||
homeassistant/components/trafikverket_weatherstation/sensor.py
|
||||
homeassistant/components/transmission/*
|
||||
homeassistant/components/travisci/sensor.py
|
||||
homeassistant/components/tuya/*
|
||||
homeassistant/components/twentemilieu/const.py
|
||||
homeassistant/components/twentemilieu/sensor.py
|
||||
homeassistant/components/twilio_call/notify.py
|
||||
homeassistant/components/twilio_sms/notify.py
|
||||
homeassistant/components/twitch/sensor.py
|
||||
@@ -634,16 +669,27 @@ omit =
|
||||
homeassistant/components/uptimerobot/binary_sensor.py
|
||||
homeassistant/components/uscis/sensor.py
|
||||
homeassistant/components/usps/*
|
||||
homeassistant/components/vallox/*
|
||||
homeassistant/components/vasttrafik/sensor.py
|
||||
homeassistant/components/velbus/*
|
||||
homeassistant/components/velbus/__init__.py
|
||||
homeassistant/components/velbus/binary_sensor.py
|
||||
homeassistant/components/velbus/climate.py
|
||||
homeassistant/components/velbus/const.py
|
||||
homeassistant/components/velbus/cover.py
|
||||
homeassistant/components/velbus/sensor.py
|
||||
homeassistant/components/velbus/switch.py
|
||||
homeassistant/components/velux/*
|
||||
homeassistant/components/venstar/climate.py
|
||||
homeassistant/components/vera/*
|
||||
homeassistant/components/verisure/*
|
||||
homeassistant/components/vesync/__init__.py
|
||||
homeassistant/components/vesync/common.py
|
||||
homeassistant/components/vesync/const.py
|
||||
homeassistant/components/vesync/switch.py
|
||||
homeassistant/components/viaggiatreno/sensor.py
|
||||
homeassistant/components/vizio/media_player.py
|
||||
homeassistant/components/vlc/media_player.py
|
||||
homeassistant/components/vlc_telnet/media_player.py
|
||||
homeassistant/components/volkszaehler/sensor.py
|
||||
homeassistant/components/volumio/media_player.py
|
||||
homeassistant/components/volvooncall/*
|
||||
@@ -651,6 +697,7 @@ omit =
|
||||
homeassistant/components/waqi/sensor.py
|
||||
homeassistant/components/waterfurnace/*
|
||||
homeassistant/components/watson_iot/*
|
||||
homeassistant/components/watson_tts/tts.py
|
||||
homeassistant/components/waze_travel_time/sensor.py
|
||||
homeassistant/components/webostv/*
|
||||
homeassistant/components/wemo/*
|
||||
@@ -661,6 +708,8 @@ omit =
|
||||
homeassistant/components/worldtidesinfo/sensor.py
|
||||
homeassistant/components/worxlandroid/sensor.py
|
||||
homeassistant/components/wunderlist/*
|
||||
homeassistant/components/wwlln/__init__.py
|
||||
homeassistant/components/wwlln/geo_location.py
|
||||
homeassistant/components/x10/light.py
|
||||
homeassistant/components/xbox_live/sensor.py
|
||||
homeassistant/components/xeoma/camera.py
|
||||
|
||||
30
.devcontainer/Dockerfile
Normal file
30
.devcontainer/Dockerfile
Normal file
@@ -0,0 +1,30 @@
|
||||
FROM python:3.7
|
||||
|
||||
RUN apt-get update \
|
||||
&& apt-get install -y --no-install-recommends \
|
||||
libudev-dev \
|
||||
libavformat-dev \
|
||||
libavcodec-dev \
|
||||
libavdevice-dev \
|
||||
libavutil-dev \
|
||||
libswscale-dev \
|
||||
libswresample-dev \
|
||||
libavfilter-dev \
|
||||
git \
|
||||
&& apt-get clean \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
RUN git clone --depth 1 https://github.com/home-assistant/hass-release \
|
||||
&& cd hass-release \
|
||||
&& pip3 install -e .
|
||||
|
||||
WORKDIR /workspace
|
||||
|
||||
# Install Python dependencies from requirements.txt if it exists
|
||||
COPY requirements_test_all.txt homeassistant/package_constraints.txt /workspace/
|
||||
RUN pip3 install -r requirements_test_all.txt -c package_constraints.txt
|
||||
|
||||
# Set the default shell to bash instead of sh
|
||||
ENV SHELL /bin/bash
|
||||
35
.devcontainer/devcontainer.json
Normal file
35
.devcontainer/devcontainer.json
Normal file
@@ -0,0 +1,35 @@
|
||||
// See https://aka.ms/vscode-remote/devcontainer.json for format details.
|
||||
{
|
||||
"name": "Home Assistant Dev",
|
||||
"context": "..",
|
||||
"dockerFile": "Dockerfile",
|
||||
"postCreateCommand": "pip3 install -e .",
|
||||
"appPort": 8123,
|
||||
"runArgs": [
|
||||
"-e", "GIT_EDTIOR='code --wait'"
|
||||
],
|
||||
"extensions": [
|
||||
"ms-python.python",
|
||||
"ms-azure-devops.azure-pipelines",
|
||||
"redhat.vscode-yaml"
|
||||
],
|
||||
"settings": {
|
||||
"python.pythonPath": "/usr/local/bin/python",
|
||||
"python.linting.pylintEnabled": true,
|
||||
"python.linting.enabled": true,
|
||||
"python.formatting.provider": "black",
|
||||
"editor.formatOnPaste": false,
|
||||
"editor.formatOnSave": true,
|
||||
"editor.formatOnType": true,
|
||||
"files.trimTrailingWhitespace": true,
|
||||
"editor.rulers": [80],
|
||||
"terminal.integrated.shell.linux": "/bin/bash",
|
||||
"yaml.customTags": [
|
||||
"!secret scalar",
|
||||
"!include_dir_named scalar",
|
||||
"!include_dir_list scalar",
|
||||
"!include_dir_merge_list scalar",
|
||||
"!include_dir_merge_named scalar"
|
||||
]
|
||||
}
|
||||
}
|
||||
2
.github/ISSUE_TEMPLATE.md
vendored
2
.github/ISSUE_TEMPLATE.md
vendored
@@ -3,7 +3,7 @@
|
||||
- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
|
||||
- Frontend issues should be submitted to the home-assistant-polymer repository: https://github.com/home-assistant/home-assistant-polymer/issues
|
||||
- iOS issues should be submitted to the home-assistant-iOS repository: https://github.com/home-assistant/home-assistant-iOS/issues
|
||||
- Do not report issues for components if you are using custom components: files in <config-dir>/custom_components
|
||||
- Do not report issues for integrations if you are using custom integration: files in <config-dir>/custom_components
|
||||
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
|
||||
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
|
||||
-->
|
||||
|
||||
2
.github/ISSUE_TEMPLATE/Bug_report.md
vendored
2
.github/ISSUE_TEMPLATE/Bug_report.md
vendored
@@ -9,7 +9,7 @@ about: Create a report to help us improve
|
||||
- Make sure you are running the latest version of Home Assistant before reporting an issue: https://github.com/home-assistant/home-assistant/releases
|
||||
- Frontend issues should be submitted to the home-assistant-polymer repository: https://github.com/home-assistant/home-assistant-polymer/issues
|
||||
- iOS issues should be submitted to the home-assistant-iOS repository: https://github.com/home-assistant/home-assistant-iOS/issues
|
||||
- Do not report issues for components if you are using custom components: files in <config-dir>/custom_components
|
||||
- Do not report issues for integrations if you are using a custom integration: files in <config-dir>/custom_components
|
||||
- This is for bugs only. Feature and enhancement requests should go in our community forum: https://community.home-assistant.io/c/feature-requests
|
||||
- Provide as many details as possible. Paste logs, configuration sample and code into the backticks. Do not delete any text from this template!
|
||||
-->
|
||||
|
||||
27
.github/lock.yml
vendored
Normal file
27
.github/lock.yml
vendored
Normal file
@@ -0,0 +1,27 @@
|
||||
# Configuration for Lock Threads - https://github.com/dessant/lock-threads
|
||||
|
||||
# Number of days of inactivity before a closed issue or pull request is locked
|
||||
daysUntilLock: 1
|
||||
|
||||
# Skip issues and pull requests created before a given timestamp. Timestamp must
|
||||
# follow ISO 8601 (`YYYY-MM-DD`). Set to `false` to disable
|
||||
skipCreatedBefore: 2019-07-01
|
||||
|
||||
# Issues and pull requests with these labels will be ignored. Set to `[]` to disable
|
||||
exemptLabels: []
|
||||
|
||||
# Label to add before locking, such as `outdated`. Set to `false` to disable
|
||||
lockLabel: false
|
||||
|
||||
# Comment to post before locking. Set to `false` to disable
|
||||
lockComment: false
|
||||
|
||||
# Assign `resolved` as the reason for locking. Set to `false` to disable
|
||||
setLockReason: false
|
||||
|
||||
# Limit to only `issues` or `pulls`
|
||||
only: pulls
|
||||
|
||||
# Optionally, specify configuration settings just for `issues` or `pulls`
|
||||
issues:
|
||||
daysUntilLock: 30
|
||||
54
.github/stale.yml
vendored
Normal file
54
.github/stale.yml
vendored
Normal file
@@ -0,0 +1,54 @@
|
||||
# Configuration for probot-stale - https://github.com/probot/stale
|
||||
|
||||
# Number of days of inactivity before an Issue or Pull Request becomes stale
|
||||
daysUntilStale: 90
|
||||
|
||||
# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
|
||||
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
|
||||
daysUntilClose: 7
|
||||
|
||||
# Only issues or pull requests with all of these labels are check if stale. Defaults to `[]` (disabled)
|
||||
onlyLabels: []
|
||||
|
||||
# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
|
||||
exemptLabels:
|
||||
- under investigation
|
||||
|
||||
# Set to true to ignore issues in a project (defaults to false)
|
||||
exemptProjects: true
|
||||
|
||||
# Set to true to ignore issues in a milestone (defaults to false)
|
||||
exemptMilestones: true
|
||||
|
||||
# Set to true to ignore issues with an assignee (defaults to false)
|
||||
exemptAssignees: false
|
||||
|
||||
# Label to use when marking as stale
|
||||
staleLabel: stale
|
||||
|
||||
# Comment to post when marking as stale. Set to `false` to disable
|
||||
markComment: >
|
||||
There hasn't been any activity on this issue recently. Due to the high number
|
||||
of incoming GitHub notifications, we have to clean some of the old issues,
|
||||
as many of them have already been resolved with the latest updates.
|
||||
|
||||
Please make sure to update to the latest Home Assistant version and check
|
||||
if that solves the issue. Let us know if that works for you by adding a
|
||||
comment 👍
|
||||
|
||||
This issue now has been marked as stale and will be closed if no further
|
||||
activity occurs. Thank you for your contributions.
|
||||
|
||||
# Comment to post when removing the stale label.
|
||||
# unmarkComment: >
|
||||
# Your comment here.
|
||||
|
||||
# Comment to post when closing a stale Issue or Pull Request.
|
||||
# closeComment: >
|
||||
# Your comment here.
|
||||
|
||||
# Limit the number of actions per hour, from 1-30. Default is 30
|
||||
limitPerRun: 30
|
||||
|
||||
# Limit to only `issues` or `pulls`
|
||||
only: issues
|
||||
10
.gitignore
vendored
10
.gitignore
vendored
@@ -4,6 +4,10 @@ config2/*
|
||||
tests/testing_config/deps
|
||||
tests/testing_config/home-assistant.log
|
||||
|
||||
# hass-release
|
||||
data/
|
||||
.token
|
||||
|
||||
# Hide sublime text stuff
|
||||
*.sublime-project
|
||||
*.sublime-workspace
|
||||
@@ -94,7 +98,10 @@ virtualization/vagrant/.vagrant
|
||||
virtualization/vagrant/config
|
||||
|
||||
# Visual Studio Code
|
||||
.vscode
|
||||
.vscode/*
|
||||
!.vscode/cSpell.json
|
||||
!.vscode/extensions.json
|
||||
!.vscode/tasks.json
|
||||
|
||||
# Built docs
|
||||
docs/build
|
||||
@@ -107,6 +114,7 @@ desktop.ini
|
||||
|
||||
# mypy
|
||||
/.mypy_cache/*
|
||||
/.dmypy.json
|
||||
|
||||
# Secrets
|
||||
.lokalise_token
|
||||
|
||||
8
.pre-commit-config.yaml
Normal file
8
.pre-commit-config.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
repos:
|
||||
- repo: https://github.com/python/black
|
||||
rev: 19.3b0
|
||||
hooks:
|
||||
- id: black
|
||||
args:
|
||||
- --safe
|
||||
- --quiet
|
||||
10
.travis.yml
10
.travis.yml
@@ -16,14 +16,14 @@ addons:
|
||||
matrix:
|
||||
fast_finish: true
|
||||
include:
|
||||
- python: "3.5.3"
|
||||
- python: "3.6"
|
||||
env: TOXENV=lint
|
||||
- python: "3.5.3"
|
||||
- python: "3.6"
|
||||
env: TOXENV=pylint
|
||||
- python: "3.5.3"
|
||||
- python: "3.6"
|
||||
env: TOXENV=typing
|
||||
- python: "3.5.3"
|
||||
env: TOXENV=py35
|
||||
- python: "3.6"
|
||||
env: TOXENV=py36
|
||||
- python: "3.7"
|
||||
env: TOXENV=py37
|
||||
|
||||
|
||||
92
.vscode/tasks.json
vendored
Normal file
92
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,92 @@
|
||||
{
|
||||
"version": "2.0.0",
|
||||
"tasks": [
|
||||
{
|
||||
"label": "Preview",
|
||||
"type": "shell",
|
||||
"command": "hass -c ./config",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true,
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Pytest",
|
||||
"type": "shell",
|
||||
"command": "pytest --timeout=10 tests",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true,
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Flake8",
|
||||
"type": "shell",
|
||||
"command": "flake8 homeassistant tests",
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true,
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Pylint",
|
||||
"type": "shell",
|
||||
"command": "pylint homeassistant",
|
||||
"dependsOn": [
|
||||
"Install all Requirements"
|
||||
],
|
||||
"group": {
|
||||
"kind": "test",
|
||||
"isDefault": true,
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Generate Requirements",
|
||||
"type": "shell",
|
||||
"command": "./script/gen_requirements_all.py",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
},
|
||||
{
|
||||
"label": "Install all Requirements",
|
||||
"type": "shell",
|
||||
"command": "pip3 install -r requirements_all.txt -c homeassistant/package_constraints.txt",
|
||||
"group": {
|
||||
"kind": "build",
|
||||
"isDefault": true
|
||||
},
|
||||
"presentation": {
|
||||
"reveal": "always",
|
||||
"panel": "new"
|
||||
},
|
||||
"problemMatcher": []
|
||||
}
|
||||
]
|
||||
}
|
||||
66
CODEOWNERS
66
CODEOWNERS
@@ -17,34 +17,42 @@ virtualization/Docker/* @home-assistant/docker
|
||||
homeassistant/scripts/check_config.py @kellerza
|
||||
|
||||
# Integrations
|
||||
homeassistant/components/adguard/* @frenck
|
||||
homeassistant/components/airvisual/* @bachya
|
||||
homeassistant/components/alarm_control_panel/* @colinodell
|
||||
homeassistant/components/alpha_vantage/* @fabaff
|
||||
homeassistant/components/amazon_polly/* @robbiet480
|
||||
homeassistant/components/ambiclimate/* @danielhiversen
|
||||
homeassistant/components/ambient_station/* @bachya
|
||||
homeassistant/components/apache_kafka/* @bachya
|
||||
homeassistant/components/api/* @home-assistant/core
|
||||
homeassistant/components/aprs/* @PhilRW
|
||||
homeassistant/components/arcam_fmj/* @elupus
|
||||
homeassistant/components/arduino/* @fabaff
|
||||
homeassistant/components/arest/* @fabaff
|
||||
homeassistant/components/asuswrt/* @kennedyshead
|
||||
homeassistant/components/aurora_abb_powerone/* @davet2001
|
||||
homeassistant/components/auth/* @home-assistant/core
|
||||
homeassistant/components/automatic/* @armills
|
||||
homeassistant/components/automation/* @home-assistant/core
|
||||
homeassistant/components/avea/* @pattyland
|
||||
homeassistant/components/awair/* @danielsjf
|
||||
homeassistant/components/aws/* @awarecan @robbiet480
|
||||
homeassistant/components/axis/* @kane610
|
||||
homeassistant/components/azure_event_hub/* @eavanvalkenburg
|
||||
homeassistant/components/bitcoin/* @fabaff
|
||||
homeassistant/components/bizkaibus/* @UgaitzEtxebarria
|
||||
homeassistant/components/blink/* @fronzbot
|
||||
homeassistant/components/bmw_connected_drive/* @ChristianKuehnel
|
||||
homeassistant/components/braviatv/* @robbiet480
|
||||
homeassistant/components/broadlink/* @danielhiversen
|
||||
homeassistant/components/brunt/* @eavanvalkenburg
|
||||
homeassistant/components/bt_smarthub/* @jxwolstenholme
|
||||
homeassistant/components/buienradar/* @mjj4791 @ties
|
||||
homeassistant/components/cisco_ios/* @fbradyirl
|
||||
homeassistant/components/cisco_mobility_express/* @fbradyirl
|
||||
homeassistant/components/cisco_webex_teams/* @fbradyirl
|
||||
homeassistant/components/ciscospark/* @fbradyirl
|
||||
homeassistant/components/cloud/* @home-assistant/core
|
||||
homeassistant/components/cloud/* @home-assistant/cloud
|
||||
homeassistant/components/cloudflare/* @ludeeus
|
||||
homeassistant/components/config/* @home-assistant/core
|
||||
homeassistant/components/configurator/* @home-assistant/core
|
||||
@@ -57,7 +65,9 @@ homeassistant/components/cups/* @fabaff
|
||||
homeassistant/components/daikin/* @fredrike @rofrantz
|
||||
homeassistant/components/darksky/* @fabaff
|
||||
homeassistant/components/deconz/* @kane610
|
||||
homeassistant/components/delijn/* @bollewolle
|
||||
homeassistant/components/demo/* @home-assistant/core
|
||||
homeassistant/components/device_automation/* @home-assistant/core
|
||||
homeassistant/components/digital_ocean/* @fabaff
|
||||
homeassistant/components/discogs/* @thibmaek
|
||||
homeassistant/components/doorbird/* @oblogic7
|
||||
@@ -66,9 +76,11 @@ homeassistant/components/ecovacs/* @OverloadUT
|
||||
homeassistant/components/edp_redy/* @abmantis
|
||||
homeassistant/components/egardia/* @jeroenterheerdt
|
||||
homeassistant/components/eight_sleep/* @mezz64
|
||||
homeassistant/components/elv/* @majuss
|
||||
homeassistant/components/emby/* @mezz64
|
||||
homeassistant/components/enigma2/* @fbradyirl
|
||||
homeassistant/components/enocean/* @bdurrer
|
||||
homeassistant/components/environment_canada/* @michaeldavie
|
||||
homeassistant/components/ephember/* @ttroy50
|
||||
homeassistant/components/epsonworkforce/* @ThaStealth
|
||||
homeassistant/components/eq3btsmart/* @rytilahti
|
||||
@@ -81,14 +93,18 @@ homeassistant/components/fitbit/* @robbiet480
|
||||
homeassistant/components/fixer/* @fabaff
|
||||
homeassistant/components/flock/* @fabaff
|
||||
homeassistant/components/flunearyou/* @bachya
|
||||
homeassistant/components/fortigate/* @kifeo
|
||||
homeassistant/components/fortios/* @kimfrellsen
|
||||
homeassistant/components/foursquare/* @robbiet480
|
||||
homeassistant/components/freebox/* @snoof85
|
||||
homeassistant/components/frontend/* @home-assistant/core
|
||||
homeassistant/components/fronius/* @nielstron
|
||||
homeassistant/components/frontend/* @home-assistant/frontend
|
||||
homeassistant/components/gearbest/* @HerrHofrat
|
||||
homeassistant/components/geniushub/* @zxdavb
|
||||
homeassistant/components/gitter/* @fabaff
|
||||
homeassistant/components/glances/* @fabaff
|
||||
homeassistant/components/gntp/* @robbiet480
|
||||
homeassistant/components/google_cloud/* @lufton
|
||||
homeassistant/components/google_translate/* @awarecan
|
||||
homeassistant/components/google_travel_time/* @robbiet480
|
||||
homeassistant/components/googlehome/* @ludeeus
|
||||
@@ -104,9 +120,9 @@ homeassistant/components/history/* @home-assistant/core
|
||||
homeassistant/components/history_graph/* @andrey-git
|
||||
homeassistant/components/hive/* @Rendili @KJonline
|
||||
homeassistant/components/homeassistant/* @home-assistant/core
|
||||
homeassistant/components/homekit/* @cdce8p
|
||||
homeassistant/components/homekit_controller/* @Jc2k
|
||||
homeassistant/components/homematic/* @pvizeli @danielperna84
|
||||
homeassistant/components/honeywell/* @zxdavb
|
||||
homeassistant/components/html5/* @robbiet480
|
||||
homeassistant/components/http/* @home-assistant/core
|
||||
homeassistant/components/huawei_lte/* @scop
|
||||
@@ -131,21 +147,26 @@ homeassistant/components/kodi/* @armills
|
||||
homeassistant/components/konnected/* @heythisisnate
|
||||
homeassistant/components/lametric/* @robbiet480
|
||||
homeassistant/components/launch_library/* @ludeeus
|
||||
homeassistant/components/lcn/* @alengwenus
|
||||
homeassistant/components/life360/* @pnbruckner
|
||||
homeassistant/components/lifx/* @amelchio
|
||||
homeassistant/components/lifx_cloud/* @amelchio
|
||||
homeassistant/components/lifx_legacy/* @amelchio
|
||||
homeassistant/components/linky/* @tiste @Quentame
|
||||
homeassistant/components/linux_battery/* @fabaff
|
||||
homeassistant/components/liveboxplaytv/* @pschmitt
|
||||
homeassistant/components/logger/* @home-assistant/core
|
||||
homeassistant/components/logi_circle/* @evanjd
|
||||
homeassistant/components/lovelace/* @home-assistant/core
|
||||
homeassistant/components/lovelace/* @home-assistant/frontend
|
||||
homeassistant/components/luci/* @fbradyirl
|
||||
homeassistant/components/luftdaten/* @fabaff
|
||||
homeassistant/components/mastodon/* @fabaff
|
||||
homeassistant/components/matrix/* @tinloaf
|
||||
homeassistant/components/mcp23017/* @jardiamj
|
||||
homeassistant/components/mediaroom/* @dgomes
|
||||
homeassistant/components/melissa/* @kennedyshead
|
||||
homeassistant/components/met/* @danielhiversen
|
||||
homeassistant/components/meteo_france/* @victorcerutti @oncleben31
|
||||
homeassistant/components/meteoalarm/* @rolfberkenbosch
|
||||
homeassistant/components/miflora/* @danielhiversen @ChristianKuehnel
|
||||
homeassistant/components/mill/* @danielhiversen
|
||||
@@ -165,31 +186,37 @@ homeassistant/components/nissan_leaf/* @filcole
|
||||
homeassistant/components/nmbs/* @thibmaek
|
||||
homeassistant/components/no_ip/* @fabaff
|
||||
homeassistant/components/notify/* @home-assistant/core
|
||||
homeassistant/components/notion/* @bachya
|
||||
homeassistant/components/nsw_fuel_station/* @nickw444
|
||||
homeassistant/components/nuki/* @pschmitt
|
||||
homeassistant/components/ohmconnect/* @robbiet480
|
||||
homeassistant/components/onboarding/* @home-assistant/core
|
||||
homeassistant/components/opentherm_gw/* @mvn23
|
||||
homeassistant/components/openuv/* @bachya
|
||||
homeassistant/components/openweathermap/* @fabaff
|
||||
homeassistant/components/orangepi_gpio/* @pascallj
|
||||
homeassistant/components/owlet/* @oblogic7
|
||||
homeassistant/components/panel_custom/* @home-assistant/core
|
||||
homeassistant/components/panel_iframe/* @home-assistant/core
|
||||
homeassistant/components/panel_custom/* @home-assistant/frontend
|
||||
homeassistant/components/panel_iframe/* @home-assistant/frontend
|
||||
homeassistant/components/persistent_notification/* @home-assistant/core
|
||||
homeassistant/components/philips_js/* @elupus
|
||||
homeassistant/components/pi_hole/* @fabaff
|
||||
homeassistant/components/plaato/* @JohNan
|
||||
homeassistant/components/plant/* @ChristianKuehnel
|
||||
homeassistant/components/point/* @fredrike
|
||||
homeassistant/components/ps4/* @ktnrg45
|
||||
homeassistant/components/ptvsd/* @swamp-ig
|
||||
homeassistant/components/push/* @dgomes
|
||||
homeassistant/components/pvoutput/* @fabaff
|
||||
homeassistant/components/qld_bushfire/* @exxamalte
|
||||
homeassistant/components/qnap/* @colinodell
|
||||
homeassistant/components/quantum_gateway/* @cisasteelersfan
|
||||
homeassistant/components/qwikswitch/* @kellerza
|
||||
homeassistant/components/raincloud/* @vanstinator
|
||||
homeassistant/components/rainforest_eagle/* @gtdiehl
|
||||
homeassistant/components/rainmachine/* @bachya
|
||||
homeassistant/components/random/* @fabaff
|
||||
homeassistant/components/repetier/* @MTrab
|
||||
homeassistant/components/rfxtrx/* @danielhiversen
|
||||
homeassistant/components/rmvtransport/* @cgtobi
|
||||
homeassistant/components/roomba/* @pschmitt
|
||||
@@ -197,6 +224,7 @@ homeassistant/components/ruter/* @ludeeus
|
||||
homeassistant/components/scene/* @home-assistant/core
|
||||
homeassistant/components/scrape/* @fabaff
|
||||
homeassistant/components/script/* @home-assistant/core
|
||||
homeassistant/components/sense/* @kbickar
|
||||
homeassistant/components/sensibo/* @andrey-git
|
||||
homeassistant/components/serial/* @fabaff
|
||||
homeassistant/components/seventeentrack/* @bachya
|
||||
@@ -205,15 +233,23 @@ homeassistant/components/shiftr/* @fabaff
|
||||
homeassistant/components/shodan/* @fabaff
|
||||
homeassistant/components/simplisafe/* @bachya
|
||||
homeassistant/components/sma/* @kellerza
|
||||
homeassistant/components/smarthab/* @outadoc
|
||||
homeassistant/components/smartthings/* @andrewsayre
|
||||
homeassistant/components/smarty/* @z0mbieprocess
|
||||
homeassistant/components/smtp/* @fabaff
|
||||
homeassistant/components/solaredge_local/* @drobtravels
|
||||
homeassistant/components/solax/* @squishykid
|
||||
homeassistant/components/somfy/* @tetienne
|
||||
homeassistant/components/songpal/* @rytilahti
|
||||
homeassistant/components/sonos/* @amelchio
|
||||
homeassistant/components/spaceapi/* @fabaff
|
||||
homeassistant/components/spider/* @peternijssen
|
||||
homeassistant/components/sql/* @dgomes
|
||||
homeassistant/components/statistics/* @fabaff
|
||||
homeassistant/components/stiebel_eltron/* @fucm
|
||||
homeassistant/components/sun/* @home-assistant/core
|
||||
homeassistant/components/stream/* @hunterjm
|
||||
homeassistant/components/suez_water/* @ooii
|
||||
homeassistant/components/sun/* @Swamp-Ig
|
||||
homeassistant/components/supla/* @mwegrzynek
|
||||
homeassistant/components/swiss_hydrological_data/* @fabaff
|
||||
homeassistant/components/swiss_public_transport/* @fabaff
|
||||
@@ -239,41 +275,47 @@ homeassistant/components/toon/* @frenck
|
||||
homeassistant/components/tplink/* @rytilahti
|
||||
homeassistant/components/traccar/* @ludeeus
|
||||
homeassistant/components/tradfri/* @ggravlingen
|
||||
homeassistant/components/trafikverket_train/* @endor-force
|
||||
homeassistant/components/tts/* @robbiet480
|
||||
homeassistant/components/twentemilieu/* @frenck
|
||||
homeassistant/components/twilio_call/* @robbiet480
|
||||
homeassistant/components/twilio_sms/* @robbiet480
|
||||
homeassistant/components/uber/* @robbiet480
|
||||
homeassistant/components/unifi/* @kane610
|
||||
homeassistant/components/upcloud/* @scop
|
||||
homeassistant/components/updater/* @home-assistant/core
|
||||
homeassistant/components/upnp/* @robbiet480
|
||||
homeassistant/components/uptimerobot/* @ludeeus
|
||||
homeassistant/components/utility_meter/* @dgomes
|
||||
homeassistant/components/velbus/* @ceral2nd
|
||||
homeassistant/components/velux/* @Julius2342
|
||||
homeassistant/components/version/* @fabaff
|
||||
homeassistant/components/vesync/* @markperdue @webdjoe
|
||||
homeassistant/components/vizio/* @raman325
|
||||
homeassistant/components/vlc_telnet/* @rodripf
|
||||
homeassistant/components/waqi/* @andrey-git
|
||||
homeassistant/components/watson_tts/* @rutkai
|
||||
homeassistant/components/weather/* @fabaff
|
||||
homeassistant/components/weblink/* @home-assistant/core
|
||||
homeassistant/components/websocket_api/* @home-assistant/core
|
||||
homeassistant/components/wemo/* @sqldiablo
|
||||
homeassistant/components/worldclock/* @fabaff
|
||||
homeassistant/components/wwlln/* @bachya
|
||||
homeassistant/components/xfinity/* @cisasteelersfan
|
||||
homeassistant/components/xiaomi_aqara/* @danielhiversen @syssi
|
||||
homeassistant/components/xiaomi_miio/* @rytilahti @syssi
|
||||
homeassistant/components/xiaomi_tv/* @fattdev
|
||||
homeassistant/components/xiaomi_tv/* @simse
|
||||
homeassistant/components/xmpp/* @fabaff @flowolf
|
||||
homeassistant/components/yamaha_musiccast/* @jalmeroth
|
||||
homeassistant/components/yeelight/* @rytilahti @zewelor
|
||||
homeassistant/components/yeelightsunflower/* @lindsaymarkward
|
||||
homeassistant/components/yessssms/* @flowolf
|
||||
homeassistant/components/yi/* @bachya
|
||||
homeassistant/components/zeroconf/* @robbiet480
|
||||
homeassistant/components/yr/* @danielhiversen
|
||||
homeassistant/components/zeroconf/* @robbiet480 @Kane610
|
||||
homeassistant/components/zha/* @dmulcahey @adminiuga
|
||||
homeassistant/components/zone/* @home-assistant/core
|
||||
homeassistant/components/zoneminder/* @rohankapoorcom
|
||||
homeassistant/components/zwave/* @home-assistant/z-wave
|
||||
|
||||
# Individual files
|
||||
homeassistant/components/group/cover @cdce8p
|
||||
homeassistant/components/demo/weather @fabaff
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
# When updating this file, please also update virtualization/Docker/Dockerfile.dev
|
||||
# This way, the development image and the production image are kept in sync.
|
||||
|
||||
FROM python:3.7
|
||||
FROM python:3.7-buster
|
||||
LABEL maintainer="Paulus Schoutsen <Paulus@PaulusSchoutsen.nl>"
|
||||
|
||||
# Uncomment any of the following lines to disable the installation.
|
||||
@@ -24,12 +24,14 @@ RUN virtualization/Docker/setup_docker_prereqs
|
||||
|
||||
# Install hass component dependencies
|
||||
COPY requirements_all.txt requirements_all.txt
|
||||
# Uninstall enum34 because some dependencies install it but breaks Python 3.4+.
|
||||
# See PR #8103 for more info.
|
||||
RUN pip3 install --no-cache-dir -r requirements_all.txt && \
|
||||
pip3 install --no-cache-dir mysqlclient psycopg2 uvloop==0.12.2 cchardet cython tensorflow
|
||||
|
||||
# Copy source
|
||||
COPY . .
|
||||
|
||||
EXPOSE 8123
|
||||
EXPOSE 8300
|
||||
EXPOSE 51827
|
||||
|
||||
CMD [ "python", "-m", "homeassistant", "--config", "/config" ]
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
Home Assistant |Build Status| |CI Status| |Coverage Status| |Chat Status|
|
||||
Home Assistant |Chat Status|
|
||||
=================================================================================
|
||||
|
||||
Home Assistant is a home automation platform running on Python 3. It is able to track and control all devices at home and offer a platform for automating control.
|
||||
@@ -27,12 +27,6 @@ components <https://developers.home-assistant.io/docs/en/creating_component_inde
|
||||
If you run into issues while using Home Assistant or during development
|
||||
of a component, check the `Home Assistant help section <https://home-assistant.io/help/>`__ of our website for further help and information.
|
||||
|
||||
.. |Build Status| image:: https://travis-ci.org/home-assistant/home-assistant.svg?branch=dev
|
||||
:target: https://travis-ci.org/home-assistant/home-assistant
|
||||
.. |CI Status| image:: https://circleci.com/gh/home-assistant/home-assistant.svg?style=shield
|
||||
:target: https://circleci.com/gh/home-assistant/home-assistant
|
||||
.. |Coverage Status| image:: https://img.shields.io/coveralls/home-assistant/home-assistant.svg
|
||||
:target: https://coveralls.io/r/home-assistant/home-assistant?branch=master
|
||||
.. |Chat Status| image:: https://img.shields.io/discord/330944238910963714.svg
|
||||
:target: https://discord.gg/c5DvZ4e
|
||||
.. |screenshot-states| image:: https://raw.github.com/home-assistant/home-assistant/master/docs/screenshots.png
|
||||
|
||||
215
azure-pipelines-ci.yml
Normal file
215
azure-pipelines-ci.yml
Normal file
@@ -0,0 +1,215 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- rc
|
||||
- dev
|
||||
- master
|
||||
pr:
|
||||
- rc
|
||||
- dev
|
||||
- master
|
||||
|
||||
resources:
|
||||
containers:
|
||||
- container: 36
|
||||
image: homeassistant/ci-azure:3.6
|
||||
- container: 37
|
||||
image: homeassistant/ci-azure:3.7
|
||||
variables:
|
||||
- name: ArtifactFeed
|
||||
value: '2df3ae11-3bf6-49bc-a809-ba0d340d6a6d'
|
||||
- name: PythonMain
|
||||
value: '36'
|
||||
- group: codecov
|
||||
|
||||
stages:
|
||||
|
||||
- stage: 'Overview'
|
||||
jobs:
|
||||
- job: 'Lint'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
container: $[ variables['PythonMain'] ]
|
||||
steps:
|
||||
- script: |
|
||||
python -m venv venv
|
||||
|
||||
. venv/bin/activate
|
||||
pip install -r requirements_test.txt -c homeassistant/package_constraints.txt
|
||||
displayName: 'Setup Env'
|
||||
- script: |
|
||||
. venv/bin/activate
|
||||
flake8 homeassistant tests script
|
||||
displayName: 'Run flake8'
|
||||
- job: 'Validate'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
container: $[ variables['PythonMain'] ]
|
||||
steps:
|
||||
- script: |
|
||||
python -m venv venv
|
||||
|
||||
. venv/bin/activate
|
||||
pip install -e .
|
||||
displayName: 'Setup Env'
|
||||
- script: |
|
||||
. venv/bin/activate
|
||||
python -m script.hassfest validate
|
||||
displayName: 'Validate manifests'
|
||||
- script: |
|
||||
. venv/bin/activate
|
||||
./script/gen_requirements_all.py validate
|
||||
displayName: 'requirements_all validate'
|
||||
- job: 'CheckFormat'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
container: $[ variables['PythonMain'] ]
|
||||
steps:
|
||||
- script: |
|
||||
python -m venv venv
|
||||
|
||||
. venv/bin/activate
|
||||
pip install -r requirements_test.txt -c homeassistant/package_constraints.txt
|
||||
displayName: 'Setup Env'
|
||||
- script: |
|
||||
. venv/bin/activate
|
||||
./script/check_format
|
||||
displayName: 'Check Black formatting'
|
||||
|
||||
- stage: 'Tests'
|
||||
dependsOn:
|
||||
- 'Overview'
|
||||
jobs:
|
||||
- job: 'PyTest'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
strategy:
|
||||
maxParallel: 3
|
||||
matrix:
|
||||
Python36:
|
||||
python.container: '36'
|
||||
Python37:
|
||||
python.container: '37'
|
||||
container: $[ variables['python.container'] ]
|
||||
steps:
|
||||
- script: |
|
||||
python --version > .cache
|
||||
displayName: 'Set python $(python.container) for requirement cache'
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
displayName: 'Restore artifacts based on Requirements'
|
||||
inputs:
|
||||
keyfile: 'requirements_test_all.txt, .cache, homeassistant/package_constraints.txt'
|
||||
targetfolder: './venv'
|
||||
vstsFeed: '$(ArtifactFeed)'
|
||||
- script: |
|
||||
set -e
|
||||
python -m venv venv
|
||||
|
||||
. venv/bin/activate
|
||||
pip install -U pip setuptools pytest-azurepipelines -c homeassistant/package_constraints.txt
|
||||
pip install -r requirements_test_all.txt -c homeassistant/package_constraints.txt
|
||||
# This is a TEMP. Eventually we should make sure our 4 dependencies drop typing.
|
||||
# Find offending deps with `pipdeptree -r -p typing`
|
||||
pip uninstall -y typing
|
||||
displayName: 'Create Virtual Environment & Install Requirements'
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
# Explicit Cache Save (instead of using RestoreAndSaveCache)
|
||||
# Dont wait with cache save for all the other task in this job to complete (±30 minutes), other parallel jobs might utilize this
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
displayName: 'Save artifacts based on Requirements'
|
||||
inputs:
|
||||
keyfile: 'requirements_test_all.txt, .cache, homeassistant/package_constraints.txt'
|
||||
targetfolder: './venv'
|
||||
vstsFeed: '$(ArtifactFeed)'
|
||||
- script: |
|
||||
. venv/bin/activate
|
||||
pip install -e .
|
||||
displayName: 'Install Home Assistant for python $(python.container)'
|
||||
- script: |
|
||||
. venv/bin/activate
|
||||
pytest --timeout=9 --durations=10 --junitxml=test-results.xml -qq -o console_output_style=count -p no:sugar tests
|
||||
displayName: 'Run pytest for python $(python.container)'
|
||||
condition: and(succeeded(), ne(variables['python.container'], variables['PythonMain']))
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
. venv/bin/activate
|
||||
pytest --timeout=9 --durations=10 --junitxml=test-results.xml --cov --cov-report=xml -qq -o console_output_style=count -p no:sugar tests
|
||||
codecov --token $(codecovToken)
|
||||
displayName: 'Run pytest for python $(python.container) / coverage'
|
||||
condition: and(succeeded(), eq(variables['python.container'], variables['PythonMain']))
|
||||
- task: PublishTestResults@2
|
||||
condition: succeededOrFailed()
|
||||
inputs:
|
||||
testResultsFiles: 'test-results.xml'
|
||||
testRunTitle: 'Publish test results for Python $(python.container)'
|
||||
- task: PublishCodeCoverageResults@1
|
||||
inputs:
|
||||
codeCoverageTool: cobertura
|
||||
summaryFileLocation: coverage.xml
|
||||
displayName: 'publish coverage artifact'
|
||||
condition: and(succeeded(), eq(variables['python.container'], variables['PythonMain']))
|
||||
|
||||
- stage: 'FullCheck'
|
||||
dependsOn:
|
||||
- 'Overview'
|
||||
jobs:
|
||||
- job: 'Pylint'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
container: $[ variables['PythonMain'] ]
|
||||
steps:
|
||||
- script: |
|
||||
python --version > .cache
|
||||
displayName: 'Set python $(PythonMain) for requirement cache'
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.RestoreCacheV1.RestoreCache@1
|
||||
displayName: 'Restore artifacts based on Requirements'
|
||||
inputs:
|
||||
keyfile: 'requirements_all.txt, requirements_test.txt, .cache, homeassistant/package_constraints.txt'
|
||||
targetfolder: './venv'
|
||||
vstsFeed: '$(ArtifactFeed)'
|
||||
- script: |
|
||||
set -e
|
||||
python -m venv venv
|
||||
|
||||
. venv/bin/activate
|
||||
pip install -U pip setuptools
|
||||
pip install -r requirements_all.txt -c homeassistant/package_constraints.txt
|
||||
pip install -r requirements_test.txt -c homeassistant/package_constraints.txt
|
||||
displayName: 'Create Virtual Environment & Install Requirements'
|
||||
condition: and(succeeded(), ne(variables['CacheRestored'], 'true'))
|
||||
- task: 1ESLighthouseEng.PipelineArtifactCaching.SaveCacheV1.SaveCache@1
|
||||
displayName: 'Save artifacts based on Requirements'
|
||||
inputs:
|
||||
keyfile: 'requirements_all.txt, requirements_test.txt, .cache, homeassistant/package_constraints.txt'
|
||||
targetfolder: './venv'
|
||||
vstsFeed: '$(ArtifactFeed)'
|
||||
- script: |
|
||||
. venv/bin/activate
|
||||
pip install -e .
|
||||
displayName: 'Install Home Assistant for python $(PythonMain)'
|
||||
- script: |
|
||||
. venv/bin/activate
|
||||
pylint homeassistant
|
||||
displayName: 'Run pylint'
|
||||
- job: 'Mypy'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
container: $[ variables['PythonMain'] ]
|
||||
steps:
|
||||
- script: |
|
||||
python -m venv venv
|
||||
|
||||
. venv/bin/activate
|
||||
pip install -r requirements_test.txt -c homeassistant/package_constraints.txt
|
||||
displayName: 'Setup Env'
|
||||
- script: |
|
||||
TYPING_FILES=$(cat mypyrc)
|
||||
echo -e "Run mypy on: \n$TYPING_FILES"
|
||||
|
||||
. venv/bin/activate
|
||||
mypy $TYPING_FILES
|
||||
displayName: 'Run mypy'
|
||||
158
azure-pipelines-release.yml
Normal file
158
azure-pipelines-release.yml
Normal file
@@ -0,0 +1,158 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
tags:
|
||||
include:
|
||||
- '*'
|
||||
pr: none
|
||||
variables:
|
||||
- name: versionBuilder
|
||||
value: '5.2'
|
||||
- group: docker
|
||||
- group: github
|
||||
- group: twine
|
||||
|
||||
|
||||
stages:
|
||||
|
||||
- stage: 'Validate'
|
||||
jobs:
|
||||
- job: 'VersionValidate'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
displayName: 'Use Python 3.7'
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
- script: |
|
||||
setup_version="$(python setup.py -V)"
|
||||
branch_version="$(Build.SourceBranchName)"
|
||||
|
||||
if [ "${setup_version}" != "${branch_version}" ]; then
|
||||
echo "Version of tag ${branch_version} don't match with ${setup_version}!"
|
||||
exit 1
|
||||
fi
|
||||
displayName: 'Check version of branch/tag'
|
||||
- script: |
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
jq curl
|
||||
|
||||
release="$(Build.SourceBranchName)"
|
||||
created_by="$(curl -s https://api.github.com/repos/home-assistant/home-assistant/releases/tags/${release} | jq --raw-output '.author.login')"
|
||||
|
||||
if [[ "${created_by}" =~ ^(balloob|pvizeli|fabaff|robbiet480)$ ]]; then
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "${created_by} is not allowed to create an release!"
|
||||
exit 1
|
||||
displayName: 'Check rights'
|
||||
|
||||
- stage: 'Build'
|
||||
jobs:
|
||||
- job: 'ReleasePython'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
steps:
|
||||
- task: UsePythonVersion@0
|
||||
displayName: 'Use Python 3.7'
|
||||
inputs:
|
||||
versionSpec: '3.7'
|
||||
- script: pip install twine wheel
|
||||
displayName: 'Install tools'
|
||||
- script: python setup.py sdist bdist_wheel
|
||||
displayName: 'Build package'
|
||||
- script: |
|
||||
export TWINE_USERNAME="$(twineUser)"
|
||||
export TWINE_PASSWORD="$(twinePassword)"
|
||||
|
||||
twine upload dist/* --skip-existing
|
||||
displayName: 'Upload pypi'
|
||||
- job: 'ReleaseDocker'
|
||||
timeoutInMinutes: 240
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
strategy:
|
||||
maxParallel: 5
|
||||
matrix:
|
||||
amd64:
|
||||
buildArch: 'amd64'
|
||||
buildMachine: 'qemux86-64,intel-nuc'
|
||||
i386:
|
||||
buildArch: 'i386'
|
||||
buildMachine: 'qemux86'
|
||||
armhf:
|
||||
buildArch: 'armhf'
|
||||
buildMachine: 'qemuarm,raspberrypi'
|
||||
armv7:
|
||||
buildArch: 'armv7'
|
||||
buildMachine: 'raspberrypi2,raspberrypi3,raspberrypi4,odroid-xu,tinker'
|
||||
aarch64:
|
||||
buildArch: 'aarch64'
|
||||
buildMachine: 'qemuarm-64,raspberrypi3-64,raspberrypi4-64,odroid-c2,orangepi-prime'
|
||||
steps:
|
||||
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
|
||||
displayName: 'Docker hub login'
|
||||
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
|
||||
displayName: 'Install Builder'
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
sudo docker run --rm --privileged \
|
||||
-v ~/.docker:/root/.docker \
|
||||
-v /run/docker.sock:/run/docker.sock:rw \
|
||||
homeassistant/amd64-builder:$(versionBuilder) \
|
||||
--homeassistant $(Build.SourceBranchName) "--$(buildArch)" \
|
||||
-r https://github.com/home-assistant/hassio-homeassistant \
|
||||
-t generic --docker-hub homeassistant
|
||||
|
||||
sudo docker run --rm --privileged \
|
||||
-v ~/.docker:/root/.docker \
|
||||
-v /run/docker.sock:/run/docker.sock:rw \
|
||||
homeassistant/amd64-builder:$(versionBuilder) \
|
||||
--homeassistant-machine "$(Build.SourceBranchName)=$(buildMachine)" \
|
||||
-r https://github.com/home-assistant/hassio-homeassistant \
|
||||
-t machine --docker-hub homeassistant
|
||||
displayName: 'Build Release'
|
||||
|
||||
- stage: 'Publish'
|
||||
jobs:
|
||||
- job: 'ReleaseHassio'
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
steps:
|
||||
- script: |
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
git jq curl
|
||||
|
||||
git config --global user.name "Pascal Vizeli"
|
||||
git config --global user.email "pvizeli@syshack.ch"
|
||||
git config --global credential.helper store
|
||||
|
||||
echo "https://$(githubToken):x-oauth-basic@github.com" > $HOME/.git-credentials
|
||||
displayName: 'Install requirements'
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
version="$(Build.SourceBranchName)"
|
||||
|
||||
git clone https://github.com/home-assistant/hassio-version
|
||||
cd hassio-version
|
||||
|
||||
dev_version="$(jq --raw-output '.homeassistant.default' dev.json)"
|
||||
beta_version="$(jq --raw-output '.homeassistant.default' beta.json)"
|
||||
stable_version="$(jq --raw-output '.homeassistant.default' stable.json)"
|
||||
|
||||
if [[ "$version" =~ b ]]; then
|
||||
sed -i "s|$dev_version|$version|g" dev.json
|
||||
sed -i "s|$beta_version|$version|g" beta.json
|
||||
else
|
||||
sed -i "s|$dev_version|$version|g" dev.json
|
||||
sed -i "s|$beta_version|$version|g" beta.json
|
||||
sed -i "s|$stable_version|$version|g" stable.json
|
||||
fi
|
||||
|
||||
git commit -am "Bump Home Assistant $version"
|
||||
git push
|
||||
displayName: 'Update version files'
|
||||
99
azure-pipelines-wheels.yml
Normal file
99
azure-pipelines-wheels.yml
Normal file
@@ -0,0 +1,99 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- dev
|
||||
paths:
|
||||
include:
|
||||
- requirements_all.txt
|
||||
pr: none
|
||||
variables:
|
||||
- name: versionWheels
|
||||
value: '1.0-3.7-alpine3.10'
|
||||
- group: wheels
|
||||
|
||||
|
||||
jobs:
|
||||
|
||||
- job: 'Wheels'
|
||||
timeoutInMinutes: 360
|
||||
pool:
|
||||
vmImage: 'ubuntu-latest'
|
||||
strategy:
|
||||
maxParallel: 5
|
||||
matrix:
|
||||
amd64:
|
||||
buildArch: 'amd64'
|
||||
i386:
|
||||
buildArch: 'i386'
|
||||
armhf:
|
||||
buildArch: 'armhf'
|
||||
armv7:
|
||||
buildArch: 'armv7'
|
||||
aarch64:
|
||||
buildArch: 'aarch64'
|
||||
steps:
|
||||
- script: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
qemu-user-static \
|
||||
binfmt-support \
|
||||
curl
|
||||
|
||||
sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
|
||||
sudo update-binfmts --enable qemu-arm
|
||||
sudo update-binfmts --enable qemu-aarch64
|
||||
displayName: 'Initial cross build'
|
||||
- script: |
|
||||
mkdir -p .ssh
|
||||
echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
|
||||
ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
|
||||
chmod 600 .ssh/*
|
||||
displayName: 'Install ssh key'
|
||||
- script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
|
||||
displayName: 'Install wheels builder'
|
||||
- script: |
|
||||
cp requirements_all.txt requirements_wheels.txt
|
||||
if [[ "$(Build.Reason)" =~ (Schedule|Manual) ]]; then
|
||||
touch requirements_diff.txt
|
||||
else
|
||||
curl -s -o requirements_diff.txt https://raw.githubusercontent.com/home-assistant/home-assistant/master/requirements_all.txt
|
||||
fi
|
||||
|
||||
requirement_files="requirements_wheels.txt requirements_diff.txt"
|
||||
for requirement_file in ${requirement_files}; do
|
||||
sed -i "s|# pytradfri|pytradfri|g" ${requirement_file}
|
||||
sed -i "s|# pybluez|pybluez|g" ${requirement_file}
|
||||
sed -i "s|# bluepy|bluepy|g" ${requirement_file}
|
||||
sed -i "s|# beacontools|beacontools|g" ${requirement_file}
|
||||
sed -i "s|# RPi.GPIO|RPi.GPIO|g" ${requirement_file}
|
||||
sed -i "s|# raspihats|raspihats|g" ${requirement_file}
|
||||
sed -i "s|# rpi-rf|rpi-rf|g" ${requirement_file}
|
||||
sed -i "s|# blinkt|blinkt|g" ${requirement_file}
|
||||
sed -i "s|# fritzconnection|fritzconnection|g" ${requirement_file}
|
||||
sed -i "s|# pyuserinput|pyuserinput|g" ${requirement_file}
|
||||
sed -i "s|# evdev|evdev|g" ${requirement_file}
|
||||
sed -i "s|# smbus-cffi|smbus-cffi|g" ${requirement_file}
|
||||
sed -i "s|# i2csense|i2csense|g" ${requirement_file}
|
||||
sed -i "s|# python-eq3bt|python-eq3bt|g" ${requirement_file}
|
||||
sed -i "s|# pycups|pycups|g" ${requirement_file}
|
||||
sed -i "s|# homekit|homekit|g" ${requirement_file}
|
||||
sed -i "s|# decora_wifi|decora_wifi|g" ${requirement_file}
|
||||
sed -i "s|# decora|decora|g" ${requirement_file}
|
||||
sed -i "s|# PySwitchbot|PySwitchbot|g" ${requirement_file}
|
||||
sed -i "s|# pySwitchmate|pySwitchmate|g" ${requirement_file}
|
||||
sed -i "s|# face_recognition|face_recognition|g" ${requirement_file}
|
||||
done
|
||||
displayName: 'Prepare requirements files for Hass.io'
|
||||
- script: |
|
||||
sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
|
||||
homeassistant/$(buildArch)-wheels:$(versionWheels) \
|
||||
--apk "build-base;cmake;git;linux-headers;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;autoconf;automake;cups-dev;linux-headers;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev" \
|
||||
--index $(wheelsIndex) \
|
||||
--requirement requirements_wheels.txt \
|
||||
--requirement-diff requirements_diff.txt \
|
||||
--upload rsync \
|
||||
--remote wheels@$(wheelsHost):/opt/wheels
|
||||
displayName: 'Run wheels build'
|
||||
@@ -1,143 +0,0 @@
|
||||
# https://dev.azure.com/home-assistant
|
||||
|
||||
trigger:
|
||||
batch: true
|
||||
branches:
|
||||
include:
|
||||
- dev
|
||||
tags:
|
||||
include:
|
||||
- '*'
|
||||
|
||||
variables:
|
||||
- name: versionBuilder
|
||||
value: '3.2'
|
||||
- name: versionWheels
|
||||
value: '0.3'
|
||||
- group: docker
|
||||
- group: wheels
|
||||
|
||||
jobs:
|
||||
|
||||
- job: 'Wheels'
|
||||
condition: eq(variables['Build.SourceBranchName'], 'dev')
|
||||
timeoutInMinutes: 360
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
strategy:
|
||||
maxParallel: 3
|
||||
matrix:
|
||||
amd64:
|
||||
buildArch: 'amd64'
|
||||
i386:
|
||||
buildArch: 'i386'
|
||||
armhf:
|
||||
buildArch: 'armhf'
|
||||
armv7:
|
||||
buildArch: 'armv7'
|
||||
aarch64:
|
||||
buildArch: 'aarch64'
|
||||
steps:
|
||||
- script: |
|
||||
sudo apt-get install -y --no-install-recommends \
|
||||
qemu-user-static \
|
||||
binfmt-support
|
||||
|
||||
sudo mount binfmt_misc -t binfmt_misc /proc/sys/fs/binfmt_misc
|
||||
sudo update-binfmts --enable qemu-arm
|
||||
sudo update-binfmts --enable qemu-aarch64
|
||||
displayName: 'Initial cross build'
|
||||
- script: |
|
||||
mkdir -p .ssh
|
||||
echo -e "-----BEGIN RSA PRIVATE KEY-----\n$(wheelsSSH)\n-----END RSA PRIVATE KEY-----" >> .ssh/id_rsa
|
||||
ssh-keyscan -H $(wheelsHost) >> .ssh/known_hosts
|
||||
chmod 600 .ssh/*
|
||||
displayName: 'Install ssh key'
|
||||
- script: sudo docker pull homeassistant/$(buildArch)-wheels:$(versionWheels)
|
||||
displayName: 'Install wheels builder'
|
||||
- script: |
|
||||
cp requirements_all.txt requirements_hassio.txt
|
||||
|
||||
# Enable because we can build it
|
||||
sed -i "s|# pytradfri|pytradfri|g" requirements_hassio.txt
|
||||
sed -i "s|# pybluez|pybluez|g" requirements_hassio.txt
|
||||
sed -i "s|# bluepy|bluepy|g" requirements_hassio.txt
|
||||
sed -i "s|# beacontools|beacontools|g" requirements_hassio.txt
|
||||
sed -i "s|# RPi.GPIO|RPi.GPIO|g" requirements_hassio.txt
|
||||
sed -i "s|# raspihats|raspihats|g" requirements_hassio.txt
|
||||
sed -i "s|# rpi-rf|rpi-rf|g" requirements_hassio.txt
|
||||
sed -i "s|# blinkt|blinkt|g" requirements_hassio.txt
|
||||
sed -i "s|# fritzconnection|fritzconnection|g" requirements_hassio.txt
|
||||
sed -i "s|# pyuserinput|pyuserinput|g" requirements_hassio.txt
|
||||
sed -i "s|# evdev|evdev|g" requirements_hassio.txt
|
||||
sed -i "s|# smbus-cffi|smbus-cffi|g" requirements_hassio.txt
|
||||
sed -i "s|# i2csense|i2csense|g" requirements_hassio.txt
|
||||
sed -i "s|# python-eq3bt|python-eq3bt|g" requirements_hassio.txt
|
||||
sed -i "s|# pycups|pycups|g" requirements_hassio.txt
|
||||
sed -i "s|# homekit|homekit|g" requirements_hassio.txt
|
||||
sed -i "s|# decora_wifi|decora_wifi|g" requirements_hassio.txt
|
||||
sed -i "s|# decora|decora|g" requirements_hassio.txt
|
||||
sed -i "s|# PySwitchbot|PySwitchbot|g" requirements_hassio.txt
|
||||
sed -i "s|# pySwitchmate|pySwitchmate|g" requirements_hassio.txt
|
||||
|
||||
# Disable because of error
|
||||
sed -i "s|insteonplm|# insteonplm|g" requirements_hassio.txt
|
||||
displayName: 'Prepare requirements files for Hass.io'
|
||||
- script: |
|
||||
sudo docker run --rm -v $(pwd):/data:ro -v $(pwd)/.ssh:/root/.ssh:rw \
|
||||
homeassistant/$(buildArch)-wheels:$(versionWheels) \
|
||||
--apk "build-base;cmake;git;linux-headers;bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;autoconf;automake;cups-dev;linux-headers;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev" \
|
||||
--index https://wheels.hass.io \
|
||||
--requirement requirements_hassio.txt \
|
||||
--upload rsync \
|
||||
--remote wheels@$(wheelsHost):/opt/wheels
|
||||
displayName: 'Run wheels build'
|
||||
|
||||
|
||||
- job: 'Release'
|
||||
condition: startsWith(variables['Build.SourceBranch'], 'refs/tags')
|
||||
timeoutInMinutes: 120
|
||||
pool:
|
||||
vmImage: 'ubuntu-16.04'
|
||||
strategy:
|
||||
maxParallel: 5
|
||||
matrix:
|
||||
amd64:
|
||||
buildArch: 'amd64'
|
||||
buildMachine: 'qemux86-64,intel-nuc'
|
||||
i386:
|
||||
buildArch: 'i386'
|
||||
buildMachine: 'qemux86'
|
||||
armhf:
|
||||
buildArch: 'armhf'
|
||||
buildMachine: 'qemuarm,raspberrypi'
|
||||
armv7:
|
||||
buildArch: 'armv7'
|
||||
buildMachine: 'raspberrypi2,raspberrypi3,odroid-xu,tinker'
|
||||
aarch64:
|
||||
buildArch: 'aarch64'
|
||||
buildMachine: 'qemuarm-64,raspberrypi3-64,odroid-c2,orangepi-prime'
|
||||
steps:
|
||||
- script: sudo docker login -u $(dockerUser) -p $(dockerPassword)
|
||||
displayName: 'Docker hub login'
|
||||
- script: sudo docker pull homeassistant/amd64-builder:$(versionBuilder)
|
||||
displayName: 'Install Builder'
|
||||
- script: |
|
||||
set -e
|
||||
|
||||
sudo docker run --rm --privileged \
|
||||
-v ~/.docker:/root/.docker \
|
||||
-v /run/docker.sock:/run/docker.sock:rw \
|
||||
homeassistant/amd64-builder:$(versionBuilder) \
|
||||
--homeassistant $(Build.SourceBranchName) "--$(buildArch)" \
|
||||
-r https://github.com/home-assistant/hassio-homeassistant \
|
||||
-t generic --docker-hub homeassistant
|
||||
|
||||
sudo docker run --rm --privileged \
|
||||
-v ~/.docker:/root/.docker \
|
||||
-v /run/docker.sock:/run/docker.sock:rw \
|
||||
homeassistant/amd64-builder:$(versionBuilder) \
|
||||
--homeassistant-machine "$(Build.SourceBranchName)=$(buildMachine)" \
|
||||
-r https://github.com/home-assistant/hassio-homeassistant \
|
||||
-t machine --docker-hub homeassistant
|
||||
displayName: 'Build Release'
|
||||
@@ -7,9 +7,7 @@ import platform
|
||||
import subprocess
|
||||
import sys
|
||||
import threading
|
||||
from typing import ( # noqa pylint: disable=unused-import
|
||||
List, Dict, Any, TYPE_CHECKING
|
||||
)
|
||||
from typing import List, Dict, Any, TYPE_CHECKING # noqa pylint: disable=unused-import
|
||||
|
||||
from homeassistant import monkey_patch
|
||||
from homeassistant.const import (
|
||||
@@ -30,11 +28,12 @@ def set_loop() -> None:
|
||||
|
||||
policy = None
|
||||
|
||||
if sys.platform == 'win32':
|
||||
if hasattr(asyncio, 'WindowsProactorEventLoopPolicy'):
|
||||
if sys.platform == "win32":
|
||||
if hasattr(asyncio, "WindowsProactorEventLoopPolicy"):
|
||||
# pylint: disable=no-member
|
||||
policy = asyncio.WindowsProactorEventLoopPolicy()
|
||||
else:
|
||||
|
||||
class ProactorPolicy(BaseDefaultEventLoopPolicy):
|
||||
"""Event loop policy to create proactor loops."""
|
||||
|
||||
@@ -56,28 +55,40 @@ def set_loop() -> None:
|
||||
def validate_python() -> None:
|
||||
"""Validate that the right Python version is running."""
|
||||
if sys.version_info[:3] < REQUIRED_PYTHON_VER:
|
||||
print("Home Assistant requires at least Python {}.{}.{}".format(
|
||||
*REQUIRED_PYTHON_VER))
|
||||
print(
|
||||
"Home Assistant requires at least Python {}.{}.{}".format(
|
||||
*REQUIRED_PYTHON_VER
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def ensure_config_path(config_dir: str) -> None:
|
||||
"""Validate the configuration directory."""
|
||||
import homeassistant.config as config_util
|
||||
lib_dir = os.path.join(config_dir, 'deps')
|
||||
|
||||
lib_dir = os.path.join(config_dir, "deps")
|
||||
|
||||
# Test if configuration directory exists
|
||||
if not os.path.isdir(config_dir):
|
||||
if config_dir != config_util.get_default_config_dir():
|
||||
print(('Fatal Error: Specified configuration directory does '
|
||||
'not exist {} ').format(config_dir))
|
||||
print(
|
||||
(
|
||||
"Fatal Error: Specified configuration directory does "
|
||||
"not exist {} "
|
||||
).format(config_dir)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
os.mkdir(config_dir)
|
||||
except OSError:
|
||||
print(('Fatal Error: Unable to create default configuration '
|
||||
'directory {} ').format(config_dir))
|
||||
print(
|
||||
(
|
||||
"Fatal Error: Unable to create default configuration "
|
||||
"directory {} "
|
||||
).format(config_dir)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
# Test if library directory exists
|
||||
@@ -85,20 +96,22 @@ def ensure_config_path(config_dir: str) -> None:
|
||||
try:
|
||||
os.mkdir(lib_dir)
|
||||
except OSError:
|
||||
print(('Fatal Error: Unable to create library '
|
||||
'directory {} ').format(lib_dir))
|
||||
print(
|
||||
("Fatal Error: Unable to create library " "directory {} ").format(
|
||||
lib_dir
|
||||
)
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
async def ensure_config_file(hass: 'core.HomeAssistant', config_dir: str) \
|
||||
-> str:
|
||||
async def ensure_config_file(hass: "core.HomeAssistant", config_dir: str) -> str:
|
||||
"""Ensure configuration file exists."""
|
||||
import homeassistant.config as config_util
|
||||
config_path = await config_util.async_ensure_config_exists(
|
||||
hass, config_dir)
|
||||
|
||||
config_path = await config_util.async_ensure_config_exists(hass, config_dir)
|
||||
|
||||
if config_path is None:
|
||||
print('Error getting configuration path')
|
||||
print("Error getting configuration path")
|
||||
sys.exit(1)
|
||||
|
||||
return config_path
|
||||
@@ -107,71 +120,72 @@ async def ensure_config_file(hass: 'core.HomeAssistant', config_dir: str) \
|
||||
def get_arguments() -> argparse.Namespace:
|
||||
"""Get parsed passed in arguments."""
|
||||
import homeassistant.config as config_util
|
||||
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Home Assistant: Observe, Control, Automate.")
|
||||
parser.add_argument('--version', action='version', version=__version__)
|
||||
description="Home Assistant: Observe, Control, Automate."
|
||||
)
|
||||
parser.add_argument("--version", action="version", version=__version__)
|
||||
parser.add_argument(
|
||||
'-c', '--config',
|
||||
metavar='path_to_config_dir',
|
||||
"-c",
|
||||
"--config",
|
||||
metavar="path_to_config_dir",
|
||||
default=config_util.get_default_config_dir(),
|
||||
help="Directory that contains the Home Assistant configuration")
|
||||
help="Directory that contains the Home Assistant configuration",
|
||||
)
|
||||
parser.add_argument(
|
||||
'--demo-mode',
|
||||
action='store_true',
|
||||
help='Start Home Assistant in demo mode')
|
||||
"--demo-mode", action="store_true", help="Start Home Assistant in demo mode"
|
||||
)
|
||||
parser.add_argument(
|
||||
'--debug',
|
||||
action='store_true',
|
||||
help='Start Home Assistant in debug mode')
|
||||
"--debug", action="store_true", help="Start Home Assistant in debug mode"
|
||||
)
|
||||
parser.add_argument(
|
||||
'--open-ui',
|
||||
action='store_true',
|
||||
help='Open the webinterface in a browser')
|
||||
"--open-ui", action="store_true", help="Open the webinterface in a browser"
|
||||
)
|
||||
parser.add_argument(
|
||||
'--skip-pip',
|
||||
action='store_true',
|
||||
help='Skips pip install of required packages on startup')
|
||||
"--skip-pip",
|
||||
action="store_true",
|
||||
help="Skips pip install of required packages on startup",
|
||||
)
|
||||
parser.add_argument(
|
||||
'-v', '--verbose',
|
||||
action='store_true',
|
||||
help="Enable verbose logging to file.")
|
||||
"-v", "--verbose", action="store_true", help="Enable verbose logging to file."
|
||||
)
|
||||
parser.add_argument(
|
||||
'--pid-file',
|
||||
metavar='path_to_pid_file',
|
||||
"--pid-file",
|
||||
metavar="path_to_pid_file",
|
||||
default=None,
|
||||
help='Path to PID file useful for running as daemon')
|
||||
help="Path to PID file useful for running as daemon",
|
||||
)
|
||||
parser.add_argument(
|
||||
'--log-rotate-days',
|
||||
"--log-rotate-days",
|
||||
type=int,
|
||||
default=None,
|
||||
help='Enables daily log rotation and keeps up to the specified days')
|
||||
help="Enables daily log rotation and keeps up to the specified days",
|
||||
)
|
||||
parser.add_argument(
|
||||
'--log-file',
|
||||
"--log-file",
|
||||
type=str,
|
||||
default=None,
|
||||
help='Log file to write to. If not set, CONFIG/home-assistant.log '
|
||||
'is used')
|
||||
help="Log file to write to. If not set, CONFIG/home-assistant.log " "is used",
|
||||
)
|
||||
parser.add_argument(
|
||||
'--log-no-color',
|
||||
action='store_true',
|
||||
help="Disable color logs")
|
||||
"--log-no-color", action="store_true", help="Disable color logs"
|
||||
)
|
||||
parser.add_argument(
|
||||
'--runner',
|
||||
action='store_true',
|
||||
help='On restart exit with code {}'.format(RESTART_EXIT_CODE))
|
||||
"--runner",
|
||||
action="store_true",
|
||||
help="On restart exit with code {}".format(RESTART_EXIT_CODE),
|
||||
)
|
||||
parser.add_argument(
|
||||
'--script',
|
||||
nargs=argparse.REMAINDER,
|
||||
help='Run one of the embedded scripts')
|
||||
"--script", nargs=argparse.REMAINDER, help="Run one of the embedded scripts"
|
||||
)
|
||||
if os.name == "posix":
|
||||
parser.add_argument(
|
||||
'--daemon',
|
||||
action='store_true',
|
||||
help='Run Home Assistant as daemon')
|
||||
"--daemon", action="store_true", help="Run Home Assistant as daemon"
|
||||
)
|
||||
|
||||
arguments = parser.parse_args()
|
||||
if os.name != "posix" or arguments.debug or arguments.runner:
|
||||
setattr(arguments, 'daemon', False)
|
||||
setattr(arguments, "daemon", False)
|
||||
|
||||
return arguments
|
||||
|
||||
@@ -192,8 +206,8 @@ def daemonize() -> None:
|
||||
sys.exit(0)
|
||||
|
||||
# redirect standard file descriptors to devnull
|
||||
infd = open(os.devnull, 'r')
|
||||
outfd = open(os.devnull, 'a+')
|
||||
infd = open(os.devnull, "r")
|
||||
outfd = open(os.devnull, "a+")
|
||||
sys.stdout.flush()
|
||||
sys.stderr.flush()
|
||||
os.dup2(infd.fileno(), sys.stdin.fileno())
|
||||
@@ -205,7 +219,7 @@ def check_pid(pid_file: str) -> None:
|
||||
"""Check that Home Assistant is not already running."""
|
||||
# Check pid file
|
||||
try:
|
||||
with open(pid_file, 'r') as file:
|
||||
with open(pid_file, "r") as file:
|
||||
pid = int(file.readline())
|
||||
except IOError:
|
||||
# PID File does not exist
|
||||
@@ -220,7 +234,7 @@ def check_pid(pid_file: str) -> None:
|
||||
except OSError:
|
||||
# PID does not exist
|
||||
return
|
||||
print('Fatal Error: HomeAssistant is already running.')
|
||||
print("Fatal Error: HomeAssistant is already running.")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@@ -228,10 +242,10 @@ def write_pid(pid_file: str) -> None:
|
||||
"""Create a PID File."""
|
||||
pid = os.getpid()
|
||||
try:
|
||||
with open(pid_file, 'w') as file:
|
||||
with open(pid_file, "w") as file:
|
||||
file.write(str(pid))
|
||||
except IOError:
|
||||
print('Fatal Error: Unable to write pid file {}'.format(pid_file))
|
||||
print("Fatal Error: Unable to write pid file {}".format(pid_file))
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@@ -255,17 +269,15 @@ def closefds_osx(min_fd: int, max_fd: int) -> None:
|
||||
|
||||
def cmdline() -> List[str]:
|
||||
"""Collect path and arguments to re-execute the current hass instance."""
|
||||
if os.path.basename(sys.argv[0]) == '__main__.py':
|
||||
if os.path.basename(sys.argv[0]) == "__main__.py":
|
||||
modulepath = os.path.dirname(sys.argv[0])
|
||||
os.environ['PYTHONPATH'] = os.path.dirname(modulepath)
|
||||
return [sys.executable] + [arg for arg in sys.argv if
|
||||
arg != '--daemon']
|
||||
os.environ["PYTHONPATH"] = os.path.dirname(modulepath)
|
||||
return [sys.executable] + [arg for arg in sys.argv if arg != "--daemon"]
|
||||
|
||||
return [arg for arg in sys.argv if arg != '--daemon']
|
||||
return [arg for arg in sys.argv if arg != "--daemon"]
|
||||
|
||||
|
||||
async def setup_and_run_hass(config_dir: str,
|
||||
args: argparse.Namespace) -> int:
|
||||
async def setup_and_run_hass(config_dir: str, args: argparse.Namespace) -> int:
|
||||
"""Set up HASS and run."""
|
||||
# pylint: disable=redefined-outer-name
|
||||
from homeassistant import bootstrap, core
|
||||
@@ -273,21 +285,29 @@ async def setup_and_run_hass(config_dir: str,
|
||||
hass = core.HomeAssistant()
|
||||
|
||||
if args.demo_mode:
|
||||
config = {
|
||||
'frontend': {},
|
||||
'demo': {}
|
||||
} # type: Dict[str, Any]
|
||||
config = {"frontend": {}, "demo": {}} # type: Dict[str, Any]
|
||||
bootstrap.async_from_config_dict(
|
||||
config, hass, config_dir=config_dir, verbose=args.verbose,
|
||||
skip_pip=args.skip_pip, log_rotate_days=args.log_rotate_days,
|
||||
log_file=args.log_file, log_no_color=args.log_no_color)
|
||||
config,
|
||||
hass,
|
||||
config_dir=config_dir,
|
||||
verbose=args.verbose,
|
||||
skip_pip=args.skip_pip,
|
||||
log_rotate_days=args.log_rotate_days,
|
||||
log_file=args.log_file,
|
||||
log_no_color=args.log_no_color,
|
||||
)
|
||||
else:
|
||||
config_file = await ensure_config_file(hass, config_dir)
|
||||
print('Config directory:', config_dir)
|
||||
print("Config directory:", config_dir)
|
||||
await bootstrap.async_from_config_file(
|
||||
config_file, hass, verbose=args.verbose, skip_pip=args.skip_pip,
|
||||
log_rotate_days=args.log_rotate_days, log_file=args.log_file,
|
||||
log_no_color=args.log_no_color)
|
||||
config_file,
|
||||
hass,
|
||||
verbose=args.verbose,
|
||||
skip_pip=args.skip_pip,
|
||||
log_rotate_days=args.log_rotate_days,
|
||||
log_file=args.log_file,
|
||||
log_no_color=args.log_no_color,
|
||||
)
|
||||
|
||||
if args.open_ui:
|
||||
# Imported here to avoid importing asyncio before monkey patch
|
||||
@@ -297,12 +317,14 @@ async def setup_and_run_hass(config_dir: str,
|
||||
"""Open the web interface in a browser."""
|
||||
if hass.config.api is not None:
|
||||
import webbrowser
|
||||
|
||||
webbrowser.open(hass.config.api.base_url)
|
||||
|
||||
run_callback_threadsafe(
|
||||
hass.loop,
|
||||
hass.bus.async_listen_once,
|
||||
EVENT_HOMEASSISTANT_START, open_browser
|
||||
EVENT_HOMEASSISTANT_START,
|
||||
open_browser,
|
||||
)
|
||||
|
||||
return await hass.async_run()
|
||||
@@ -312,17 +334,17 @@ def try_to_restart() -> None:
|
||||
"""Attempt to clean up state and start a new Home Assistant instance."""
|
||||
# Things should be mostly shut down already at this point, now just try
|
||||
# to clean up things that may have been left behind.
|
||||
sys.stderr.write('Home Assistant attempting to restart.\n')
|
||||
sys.stderr.write("Home Assistant attempting to restart.\n")
|
||||
|
||||
# Count remaining threads, ideally there should only be one non-daemonized
|
||||
# thread left (which is us). Nothing we really do with it, but it might be
|
||||
# useful when debugging shutdown/restart issues.
|
||||
try:
|
||||
nthreads = sum(thread.is_alive() and not thread.daemon
|
||||
for thread in threading.enumerate())
|
||||
nthreads = sum(
|
||||
thread.is_alive() and not thread.daemon for thread in threading.enumerate()
|
||||
)
|
||||
if nthreads > 1:
|
||||
sys.stderr.write(
|
||||
"Found {} non-daemonic threads.\n".format(nthreads))
|
||||
sys.stderr.write("Found {} non-daemonic threads.\n".format(nthreads))
|
||||
|
||||
# Somehow we sometimes seem to trigger an assertion in the python threading
|
||||
# module. It seems we find threads that have no associated OS level thread
|
||||
@@ -336,7 +358,7 @@ def try_to_restart() -> None:
|
||||
except ValueError:
|
||||
max_fd = 256
|
||||
|
||||
if platform.system() == 'Darwin':
|
||||
if platform.system() == "Darwin":
|
||||
closefds_osx(3, max_fd)
|
||||
else:
|
||||
os.closerange(3, max_fd)
|
||||
@@ -355,16 +377,15 @@ def main() -> int:
|
||||
validate_python()
|
||||
|
||||
monkey_patch_needed = sys.version_info[:3] < (3, 6, 3)
|
||||
if monkey_patch_needed and os.environ.get('HASS_NO_MONKEY') != '1':
|
||||
if sys.version_info[:2] >= (3, 6):
|
||||
monkey_patch.disable_c_asyncio()
|
||||
if monkey_patch_needed and os.environ.get("HASS_NO_MONKEY") != "1":
|
||||
monkey_patch.disable_c_asyncio()
|
||||
monkey_patch.patch_weakref_tasks()
|
||||
|
||||
set_loop()
|
||||
|
||||
# Run a simple daemon runner process on Windows to handle restarts
|
||||
if os.name == 'nt' and '--runner' not in sys.argv:
|
||||
nt_args = cmdline() + ['--runner']
|
||||
if os.name == "nt" and "--runner" not in sys.argv:
|
||||
nt_args = cmdline() + ["--runner"]
|
||||
while True:
|
||||
try:
|
||||
subprocess.check_call(nt_args)
|
||||
@@ -379,6 +400,7 @@ def main() -> int:
|
||||
|
||||
if args.script is not None:
|
||||
from homeassistant import scripts
|
||||
|
||||
return scripts.run(args.script)
|
||||
|
||||
config_dir = os.path.join(os.getcwd(), args.config)
|
||||
@@ -393,6 +415,7 @@ def main() -> int:
|
||||
write_pid(args.pid_file)
|
||||
|
||||
from homeassistant.util.async_ import asyncio_run
|
||||
|
||||
exit_code = asyncio_run(setup_and_run_hass(config_dir, args))
|
||||
if exit_code == RESTART_EXIT_CODE and not args.runner:
|
||||
try_to_restart()
|
||||
|
||||
@@ -17,8 +17,8 @@ from .const import GROUP_ID_ADMIN
|
||||
from .mfa_modules import auth_mfa_module_from_config, MultiFactorAuthModule
|
||||
from .providers import auth_provider_from_config, AuthProvider, LoginFlow
|
||||
|
||||
EVENT_USER_ADDED = 'user_added'
|
||||
EVENT_USER_REMOVED = 'user_removed'
|
||||
EVENT_USER_ADDED = "user_added"
|
||||
EVENT_USER_REMOVED = "user_removed"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
_MfaModuleDict = Dict[str, MultiFactorAuthModule]
|
||||
@@ -27,9 +27,10 @@ _ProviderDict = Dict[_ProviderKey, AuthProvider]
|
||||
|
||||
|
||||
async def auth_manager_from_config(
|
||||
hass: HomeAssistant,
|
||||
provider_configs: List[Dict[str, Any]],
|
||||
module_configs: List[Dict[str, Any]]) -> 'AuthManager':
|
||||
hass: HomeAssistant,
|
||||
provider_configs: List[Dict[str, Any]],
|
||||
module_configs: List[Dict[str, Any]],
|
||||
) -> "AuthManager":
|
||||
"""Initialize an auth manager from config.
|
||||
|
||||
CORE_CONFIG_SCHEMA will make sure do duplicated auth providers or
|
||||
@@ -38,8 +39,11 @@ async def auth_manager_from_config(
|
||||
store = auth_store.AuthStore(hass)
|
||||
if provider_configs:
|
||||
providers = await asyncio.gather(
|
||||
*[auth_provider_from_config(hass, store, config)
|
||||
for config in provider_configs])
|
||||
*(
|
||||
auth_provider_from_config(hass, store, config)
|
||||
for config in provider_configs
|
||||
)
|
||||
)
|
||||
else:
|
||||
providers = ()
|
||||
# So returned auth providers are in same order as config
|
||||
@@ -50,8 +54,8 @@ async def auth_manager_from_config(
|
||||
|
||||
if module_configs:
|
||||
modules = await asyncio.gather(
|
||||
*[auth_mfa_module_from_config(hass, config)
|
||||
for config in module_configs])
|
||||
*(auth_mfa_module_from_config(hass, config) for config in module_configs)
|
||||
)
|
||||
else:
|
||||
modules = ()
|
||||
# So returned auth modules are in same order as config
|
||||
@@ -66,17 +70,21 @@ async def auth_manager_from_config(
|
||||
class AuthManager:
|
||||
"""Manage the authentication for Home Assistant."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant, store: auth_store.AuthStore,
|
||||
providers: _ProviderDict, mfa_modules: _MfaModuleDict) \
|
||||
-> None:
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
store: auth_store.AuthStore,
|
||||
providers: _ProviderDict,
|
||||
mfa_modules: _MfaModuleDict,
|
||||
) -> None:
|
||||
"""Initialize the auth manager."""
|
||||
self.hass = hass
|
||||
self._store = store
|
||||
self._providers = providers
|
||||
self._mfa_modules = mfa_modules
|
||||
self.login_flow = data_entry_flow.FlowManager(
|
||||
hass, self._async_create_login_flow,
|
||||
self._async_finish_login_flow)
|
||||
hass, self._async_create_login_flow, self._async_finish_login_flow
|
||||
)
|
||||
|
||||
@property
|
||||
def support_legacy(self) -> bool:
|
||||
@@ -86,7 +94,7 @@ class AuthManager:
|
||||
Should be removed when we removed legacy_api_password auth providers.
|
||||
"""
|
||||
for provider_type, _ in self._providers:
|
||||
if provider_type == 'legacy_api_password':
|
||||
if provider_type == "legacy_api_password":
|
||||
return True
|
||||
return False
|
||||
|
||||
@@ -100,20 +108,21 @@ class AuthManager:
|
||||
"""Return a list of available auth modules."""
|
||||
return list(self._mfa_modules.values())
|
||||
|
||||
def get_auth_provider(self, provider_type: str, provider_id: str) \
|
||||
-> Optional[AuthProvider]:
|
||||
def get_auth_provider(
|
||||
self, provider_type: str, provider_id: str
|
||||
) -> Optional[AuthProvider]:
|
||||
"""Return an auth provider, None if not found."""
|
||||
return self._providers.get((provider_type, provider_id))
|
||||
|
||||
def get_auth_providers(self, provider_type: str) \
|
||||
-> List[AuthProvider]:
|
||||
def get_auth_providers(self, provider_type: str) -> List[AuthProvider]:
|
||||
"""Return a List of auth provider of one type, Empty if not found."""
|
||||
return [provider
|
||||
for (p_type, _), provider in self._providers.items()
|
||||
if p_type == provider_type]
|
||||
return [
|
||||
provider
|
||||
for (p_type, _), provider in self._providers.items()
|
||||
if p_type == provider_type
|
||||
]
|
||||
|
||||
def get_auth_mfa_module(self, module_id: str) \
|
||||
-> Optional[MultiFactorAuthModule]:
|
||||
def get_auth_mfa_module(self, module_id: str) -> Optional[MultiFactorAuthModule]:
|
||||
"""Return a multi-factor auth module, None if not found."""
|
||||
return self._mfa_modules.get(module_id)
|
||||
|
||||
@@ -135,7 +144,8 @@ class AuthManager:
|
||||
return await self._store.async_get_group(group_id)
|
||||
|
||||
async def async_get_user_by_credentials(
|
||||
self, credentials: models.Credentials) -> Optional[models.User]:
|
||||
self, credentials: models.Credentials
|
||||
) -> Optional[models.User]:
|
||||
"""Get a user by credential, return None if not found."""
|
||||
for user in await self.async_get_users():
|
||||
for creds in user.credentials:
|
||||
@@ -145,57 +155,50 @@ class AuthManager:
|
||||
return None
|
||||
|
||||
async def async_create_system_user(
|
||||
self, name: str,
|
||||
group_ids: Optional[List[str]] = None) -> models.User:
|
||||
self, name: str, group_ids: Optional[List[str]] = None
|
||||
) -> models.User:
|
||||
"""Create a system user."""
|
||||
user = await self._store.async_create_user(
|
||||
name=name,
|
||||
system_generated=True,
|
||||
is_active=True,
|
||||
group_ids=group_ids or [],
|
||||
name=name, system_generated=True, is_active=True, group_ids=group_ids or []
|
||||
)
|
||||
|
||||
self.hass.bus.async_fire(EVENT_USER_ADDED, {
|
||||
'user_id': user.id
|
||||
})
|
||||
self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
|
||||
|
||||
return user
|
||||
|
||||
async def async_create_user(self, name: str) -> models.User:
|
||||
"""Create a user."""
|
||||
kwargs = {
|
||||
'name': name,
|
||||
'is_active': True,
|
||||
'group_ids': [GROUP_ID_ADMIN]
|
||||
"name": name,
|
||||
"is_active": True,
|
||||
"group_ids": [GROUP_ID_ADMIN],
|
||||
} # type: Dict[str, Any]
|
||||
|
||||
if await self._user_should_be_owner():
|
||||
kwargs['is_owner'] = True
|
||||
kwargs["is_owner"] = True
|
||||
|
||||
user = await self._store.async_create_user(**kwargs)
|
||||
|
||||
self.hass.bus.async_fire(EVENT_USER_ADDED, {
|
||||
'user_id': user.id
|
||||
})
|
||||
self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
|
||||
|
||||
return user
|
||||
|
||||
async def async_get_or_create_user(self, credentials: models.Credentials) \
|
||||
-> models.User:
|
||||
async def async_get_or_create_user(
|
||||
self, credentials: models.Credentials
|
||||
) -> models.User:
|
||||
"""Get or create a user."""
|
||||
if not credentials.is_new:
|
||||
user = await self.async_get_user_by_credentials(credentials)
|
||||
if user is None:
|
||||
raise ValueError('Unable to find the user.')
|
||||
raise ValueError("Unable to find the user.")
|
||||
return user
|
||||
|
||||
auth_provider = self._async_get_auth_provider(credentials)
|
||||
|
||||
if auth_provider is None:
|
||||
raise RuntimeError('Credential with unknown provider encountered')
|
||||
raise RuntimeError("Credential with unknown provider encountered")
|
||||
|
||||
info = await auth_provider.async_user_meta_for_credentials(
|
||||
credentials)
|
||||
info = await auth_provider.async_user_meta_for_credentials(credentials)
|
||||
|
||||
user = await self._store.async_create_user(
|
||||
credentials=credentials,
|
||||
@@ -204,14 +207,13 @@ class AuthManager:
|
||||
group_ids=[GROUP_ID_ADMIN],
|
||||
)
|
||||
|
||||
self.hass.bus.async_fire(EVENT_USER_ADDED, {
|
||||
'user_id': user.id
|
||||
})
|
||||
self.hass.bus.async_fire(EVENT_USER_ADDED, {"user_id": user.id})
|
||||
|
||||
return user
|
||||
|
||||
async def async_link_user(self, user: models.User,
|
||||
credentials: models.Credentials) -> None:
|
||||
async def async_link_user(
|
||||
self, user: models.User, credentials: models.Credentials
|
||||
) -> None:
|
||||
"""Link credentials to an existing user."""
|
||||
await self._store.async_link_user(user, credentials)
|
||||
|
||||
@@ -227,19 +229,20 @@ class AuthManager:
|
||||
|
||||
await self._store.async_remove_user(user)
|
||||
|
||||
self.hass.bus.async_fire(EVENT_USER_REMOVED, {
|
||||
'user_id': user.id
|
||||
})
|
||||
self.hass.bus.async_fire(EVENT_USER_REMOVED, {"user_id": user.id})
|
||||
|
||||
async def async_update_user(self, user: models.User,
|
||||
name: Optional[str] = None,
|
||||
group_ids: Optional[List[str]] = None) -> None:
|
||||
async def async_update_user(
|
||||
self,
|
||||
user: models.User,
|
||||
name: Optional[str] = None,
|
||||
group_ids: Optional[List[str]] = None,
|
||||
) -> None:
|
||||
"""Update a user."""
|
||||
kwargs = {} # type: Dict[str,Any]
|
||||
if name is not None:
|
||||
kwargs['name'] = name
|
||||
kwargs["name"] = name
|
||||
if group_ids is not None:
|
||||
kwargs['group_ids'] = group_ids
|
||||
kwargs["group_ids"] = group_ids
|
||||
await self._store.async_update_user(user, **kwargs)
|
||||
|
||||
async def async_activate_user(self, user: models.User) -> None:
|
||||
@@ -249,47 +252,52 @@ class AuthManager:
|
||||
async def async_deactivate_user(self, user: models.User) -> None:
|
||||
"""Deactivate a user."""
|
||||
if user.is_owner:
|
||||
raise ValueError('Unable to deactive the owner')
|
||||
raise ValueError("Unable to deactive the owner")
|
||||
await self._store.async_deactivate_user(user)
|
||||
|
||||
async def async_remove_credentials(
|
||||
self, credentials: models.Credentials) -> None:
|
||||
async def async_remove_credentials(self, credentials: models.Credentials) -> None:
|
||||
"""Remove credentials."""
|
||||
provider = self._async_get_auth_provider(credentials)
|
||||
|
||||
if (provider is not None and
|
||||
hasattr(provider, 'async_will_remove_credentials')):
|
||||
if provider is not None and hasattr(provider, "async_will_remove_credentials"):
|
||||
# https://github.com/python/mypy/issues/1424
|
||||
await provider.async_will_remove_credentials( # type: ignore
|
||||
credentials)
|
||||
credentials
|
||||
)
|
||||
|
||||
await self._store.async_remove_credentials(credentials)
|
||||
|
||||
async def async_enable_user_mfa(self, user: models.User,
|
||||
mfa_module_id: str, data: Any) -> None:
|
||||
async def async_enable_user_mfa(
|
||||
self, user: models.User, mfa_module_id: str, data: Any
|
||||
) -> None:
|
||||
"""Enable a multi-factor auth module for user."""
|
||||
if user.system_generated:
|
||||
raise ValueError('System generated users cannot enable '
|
||||
'multi-factor auth module.')
|
||||
raise ValueError(
|
||||
"System generated users cannot enable " "multi-factor auth module."
|
||||
)
|
||||
|
||||
module = self.get_auth_mfa_module(mfa_module_id)
|
||||
if module is None:
|
||||
raise ValueError('Unable find multi-factor auth module: {}'
|
||||
.format(mfa_module_id))
|
||||
raise ValueError(
|
||||
"Unable find multi-factor auth module: {}".format(mfa_module_id)
|
||||
)
|
||||
|
||||
await module.async_setup_user(user.id, data)
|
||||
|
||||
async def async_disable_user_mfa(self, user: models.User,
|
||||
mfa_module_id: str) -> None:
|
||||
async def async_disable_user_mfa(
|
||||
self, user: models.User, mfa_module_id: str
|
||||
) -> None:
|
||||
"""Disable a multi-factor auth module for user."""
|
||||
if user.system_generated:
|
||||
raise ValueError('System generated users cannot disable '
|
||||
'multi-factor auth module.')
|
||||
raise ValueError(
|
||||
"System generated users cannot disable " "multi-factor auth module."
|
||||
)
|
||||
|
||||
module = self.get_auth_mfa_module(mfa_module_id)
|
||||
if module is None:
|
||||
raise ValueError('Unable find multi-factor auth module: {}'
|
||||
.format(mfa_module_id))
|
||||
raise ValueError(
|
||||
"Unable find multi-factor auth module: {}".format(mfa_module_id)
|
||||
)
|
||||
|
||||
await module.async_depose_user(user.id)
|
||||
|
||||
@@ -302,20 +310,23 @@ class AuthManager:
|
||||
return modules
|
||||
|
||||
async def async_create_refresh_token(
|
||||
self, user: models.User, client_id: Optional[str] = None,
|
||||
client_name: Optional[str] = None,
|
||||
client_icon: Optional[str] = None,
|
||||
token_type: Optional[str] = None,
|
||||
access_token_expiration: timedelta = ACCESS_TOKEN_EXPIRATION) \
|
||||
-> models.RefreshToken:
|
||||
self,
|
||||
user: models.User,
|
||||
client_id: Optional[str] = None,
|
||||
client_name: Optional[str] = None,
|
||||
client_icon: Optional[str] = None,
|
||||
token_type: Optional[str] = None,
|
||||
access_token_expiration: timedelta = ACCESS_TOKEN_EXPIRATION,
|
||||
) -> models.RefreshToken:
|
||||
"""Create a new refresh token for a user."""
|
||||
if not user.is_active:
|
||||
raise ValueError('User is not active')
|
||||
raise ValueError("User is not active")
|
||||
|
||||
if user.system_generated and client_id is not None:
|
||||
raise ValueError(
|
||||
'System generated users cannot have refresh tokens connected '
|
||||
'to a client.')
|
||||
"System generated users cannot have refresh tokens connected "
|
||||
"to a client."
|
||||
)
|
||||
|
||||
if token_type is None:
|
||||
if user.system_generated:
|
||||
@@ -325,61 +336,76 @@ class AuthManager:
|
||||
|
||||
if user.system_generated != (token_type == models.TOKEN_TYPE_SYSTEM):
|
||||
raise ValueError(
|
||||
'System generated users can only have system type '
|
||||
'refresh tokens')
|
||||
"System generated users can only have system type " "refresh tokens"
|
||||
)
|
||||
|
||||
if token_type == models.TOKEN_TYPE_NORMAL and client_id is None:
|
||||
raise ValueError('Client is required to generate a refresh token.')
|
||||
raise ValueError("Client is required to generate a refresh token.")
|
||||
|
||||
if (token_type == models.TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN and
|
||||
client_name is None):
|
||||
raise ValueError('Client_name is required for long-lived access '
|
||||
'token')
|
||||
if (
|
||||
token_type == models.TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN
|
||||
and client_name is None
|
||||
):
|
||||
raise ValueError("Client_name is required for long-lived access " "token")
|
||||
|
||||
if token_type == models.TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN:
|
||||
for token in user.refresh_tokens.values():
|
||||
if (token.client_name == client_name and token.token_type ==
|
||||
models.TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN):
|
||||
if (
|
||||
token.client_name == client_name
|
||||
and token.token_type == models.TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN
|
||||
):
|
||||
# Each client_name can only have one
|
||||
# long_lived_access_token type of refresh token
|
||||
raise ValueError('{} already exists'.format(client_name))
|
||||
raise ValueError("{} already exists".format(client_name))
|
||||
|
||||
return await self._store.async_create_refresh_token(
|
||||
user, client_id, client_name, client_icon,
|
||||
token_type, access_token_expiration)
|
||||
user,
|
||||
client_id,
|
||||
client_name,
|
||||
client_icon,
|
||||
token_type,
|
||||
access_token_expiration,
|
||||
)
|
||||
|
||||
async def async_get_refresh_token(
|
||||
self, token_id: str) -> Optional[models.RefreshToken]:
|
||||
self, token_id: str
|
||||
) -> Optional[models.RefreshToken]:
|
||||
"""Get refresh token by id."""
|
||||
return await self._store.async_get_refresh_token(token_id)
|
||||
|
||||
async def async_get_refresh_token_by_token(
|
||||
self, token: str) -> Optional[models.RefreshToken]:
|
||||
self, token: str
|
||||
) -> Optional[models.RefreshToken]:
|
||||
"""Get refresh token by token."""
|
||||
return await self._store.async_get_refresh_token_by_token(token)
|
||||
|
||||
async def async_remove_refresh_token(self,
|
||||
refresh_token: models.RefreshToken) \
|
||||
-> None:
|
||||
async def async_remove_refresh_token(
|
||||
self, refresh_token: models.RefreshToken
|
||||
) -> None:
|
||||
"""Delete a refresh token."""
|
||||
await self._store.async_remove_refresh_token(refresh_token)
|
||||
|
||||
@callback
|
||||
def async_create_access_token(self,
|
||||
refresh_token: models.RefreshToken,
|
||||
remote_ip: Optional[str] = None) -> str:
|
||||
def async_create_access_token(
|
||||
self, refresh_token: models.RefreshToken, remote_ip: Optional[str] = None
|
||||
) -> str:
|
||||
"""Create a new access token."""
|
||||
self._store.async_log_refresh_token_usage(refresh_token, remote_ip)
|
||||
|
||||
now = dt_util.utcnow()
|
||||
return jwt.encode({
|
||||
'iss': refresh_token.id,
|
||||
'iat': now,
|
||||
'exp': now + refresh_token.access_token_expiration,
|
||||
}, refresh_token.jwt_key, algorithm='HS256').decode()
|
||||
return jwt.encode(
|
||||
{
|
||||
"iss": refresh_token.id,
|
||||
"iat": now,
|
||||
"exp": now + refresh_token.access_token_expiration,
|
||||
},
|
||||
refresh_token.jwt_key,
|
||||
algorithm="HS256",
|
||||
).decode()
|
||||
|
||||
async def async_validate_access_token(
|
||||
self, token: str) -> Optional[models.RefreshToken]:
|
||||
self, token: str
|
||||
) -> Optional[models.RefreshToken]:
|
||||
"""Return refresh token if an access token is valid."""
|
||||
try:
|
||||
unverif_claims = jwt.decode(token, verify=False)
|
||||
@@ -387,23 +413,18 @@ class AuthManager:
|
||||
return None
|
||||
|
||||
refresh_token = await self.async_get_refresh_token(
|
||||
cast(str, unverif_claims.get('iss')))
|
||||
cast(str, unverif_claims.get("iss"))
|
||||
)
|
||||
|
||||
if refresh_token is None:
|
||||
jwt_key = ''
|
||||
issuer = ''
|
||||
jwt_key = ""
|
||||
issuer = ""
|
||||
else:
|
||||
jwt_key = refresh_token.jwt_key
|
||||
issuer = refresh_token.id
|
||||
|
||||
try:
|
||||
jwt.decode(
|
||||
token,
|
||||
jwt_key,
|
||||
leeway=10,
|
||||
issuer=issuer,
|
||||
algorithms=['HS256']
|
||||
)
|
||||
jwt.decode(token, jwt_key, leeway=10, issuer=issuer, algorithms=["HS256"])
|
||||
except jwt.InvalidTokenError:
|
||||
return None
|
||||
|
||||
@@ -413,31 +434,32 @@ class AuthManager:
|
||||
return refresh_token
|
||||
|
||||
async def _async_create_login_flow(
|
||||
self, handler: _ProviderKey, *, context: Optional[Dict],
|
||||
data: Optional[Any]) -> data_entry_flow.FlowHandler:
|
||||
self, handler: _ProviderKey, *, context: Optional[Dict], data: Optional[Any]
|
||||
) -> data_entry_flow.FlowHandler:
|
||||
"""Create a login flow."""
|
||||
auth_provider = self._providers[handler]
|
||||
|
||||
return await auth_provider.async_login_flow(context)
|
||||
|
||||
async def _async_finish_login_flow(
|
||||
self, flow: LoginFlow, result: Dict[str, Any]) \
|
||||
-> Dict[str, Any]:
|
||||
self, flow: LoginFlow, result: Dict[str, Any]
|
||||
) -> Dict[str, Any]:
|
||||
"""Return a user as result of login flow."""
|
||||
if result['type'] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
|
||||
if result["type"] != data_entry_flow.RESULT_TYPE_CREATE_ENTRY:
|
||||
return result
|
||||
|
||||
# we got final result
|
||||
if isinstance(result['data'], models.User):
|
||||
result['result'] = result['data']
|
||||
if isinstance(result["data"], models.User):
|
||||
result["result"] = result["data"]
|
||||
return result
|
||||
|
||||
auth_provider = self._providers[result['handler']]
|
||||
auth_provider = self._providers[result["handler"]]
|
||||
credentials = await auth_provider.async_get_or_create_credentials(
|
||||
result['data'])
|
||||
result["data"]
|
||||
)
|
||||
|
||||
if flow.context is not None and flow.context.get('credential_only'):
|
||||
result['result'] = credentials
|
||||
if flow.context is not None and flow.context.get("credential_only"):
|
||||
result["result"] = credentials
|
||||
return result
|
||||
|
||||
# multi-factor module cannot enabled for new credential
|
||||
@@ -452,15 +474,18 @@ class AuthManager:
|
||||
flow.available_mfa_modules = modules
|
||||
return await flow.async_step_select_mfa_module()
|
||||
|
||||
result['result'] = await self.async_get_or_create_user(credentials)
|
||||
result["result"] = await self.async_get_or_create_user(credentials)
|
||||
return result
|
||||
|
||||
@callback
|
||||
def _async_get_auth_provider(
|
||||
self, credentials: models.Credentials) -> Optional[AuthProvider]:
|
||||
self, credentials: models.Credentials
|
||||
) -> Optional[AuthProvider]:
|
||||
"""Get auth provider from a set of credentials."""
|
||||
auth_provider_key = (credentials.auth_provider_type,
|
||||
credentials.auth_provider_id)
|
||||
auth_provider_key = (
|
||||
credentials.auth_provider_type,
|
||||
credentials.auth_provider_id,
|
||||
)
|
||||
return self._providers.get(auth_provider_key)
|
||||
|
||||
async def _user_should_be_owner(self) -> bool:
|
||||
|
||||
@@ -16,10 +16,10 @@ from .permissions import PermissionLookup, system_policies
|
||||
from .permissions.types import PolicyType # noqa: F401
|
||||
|
||||
STORAGE_VERSION = 1
|
||||
STORAGE_KEY = 'auth'
|
||||
GROUP_NAME_ADMIN = 'Administrators'
|
||||
STORAGE_KEY = "auth"
|
||||
GROUP_NAME_ADMIN = "Administrators"
|
||||
GROUP_NAME_USER = "Users"
|
||||
GROUP_NAME_READ_ONLY = 'Read Only'
|
||||
GROUP_NAME_READ_ONLY = "Read Only"
|
||||
|
||||
|
||||
class AuthStore:
|
||||
@@ -37,8 +37,9 @@ class AuthStore:
|
||||
self._users = None # type: Optional[Dict[str, models.User]]
|
||||
self._groups = None # type: Optional[Dict[str, models.Group]]
|
||||
self._perm_lookup = None # type: Optional[PermissionLookup]
|
||||
self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY,
|
||||
private=True)
|
||||
self._store = hass.helpers.storage.Store(
|
||||
STORAGE_VERSION, STORAGE_KEY, private=True
|
||||
)
|
||||
self._lock = asyncio.Lock()
|
||||
|
||||
async def async_get_groups(self) -> List[models.Group]:
|
||||
@@ -74,11 +75,14 @@ class AuthStore:
|
||||
return self._users.get(user_id)
|
||||
|
||||
async def async_create_user(
|
||||
self, name: Optional[str], is_owner: Optional[bool] = None,
|
||||
is_active: Optional[bool] = None,
|
||||
system_generated: Optional[bool] = None,
|
||||
credentials: Optional[models.Credentials] = None,
|
||||
group_ids: Optional[List[str]] = None) -> models.User:
|
||||
self,
|
||||
name: Optional[str],
|
||||
is_owner: Optional[bool] = None,
|
||||
is_active: Optional[bool] = None,
|
||||
system_generated: Optional[bool] = None,
|
||||
credentials: Optional[models.Credentials] = None,
|
||||
group_ids: Optional[List[str]] = None,
|
||||
) -> models.User:
|
||||
"""Create a new user."""
|
||||
if self._users is None:
|
||||
await self._async_load()
|
||||
@@ -87,28 +91,28 @@ class AuthStore:
|
||||
assert self._groups is not None
|
||||
|
||||
groups = []
|
||||
for group_id in (group_ids or []):
|
||||
for group_id in group_ids or []:
|
||||
group = self._groups.get(group_id)
|
||||
if group is None:
|
||||
raise ValueError('Invalid group specified {}'.format(group_id))
|
||||
raise ValueError("Invalid group specified {}".format(group_id))
|
||||
groups.append(group)
|
||||
|
||||
kwargs = {
|
||||
'name': name,
|
||||
"name": name,
|
||||
# Until we get group management, we just put everyone in the
|
||||
# same group.
|
||||
'groups': groups,
|
||||
'perm_lookup': self._perm_lookup,
|
||||
"groups": groups,
|
||||
"perm_lookup": self._perm_lookup,
|
||||
} # type: Dict[str, Any]
|
||||
|
||||
if is_owner is not None:
|
||||
kwargs['is_owner'] = is_owner
|
||||
kwargs["is_owner"] = is_owner
|
||||
|
||||
if is_active is not None:
|
||||
kwargs['is_active'] = is_active
|
||||
kwargs["is_active"] = is_active
|
||||
|
||||
if system_generated is not None:
|
||||
kwargs['system_generated'] = system_generated
|
||||
kwargs["system_generated"] = system_generated
|
||||
|
||||
new_user = models.User(**kwargs)
|
||||
|
||||
@@ -122,8 +126,9 @@ class AuthStore:
|
||||
await self.async_link_user(new_user, credentials)
|
||||
return new_user
|
||||
|
||||
async def async_link_user(self, user: models.User,
|
||||
credentials: models.Credentials) -> None:
|
||||
async def async_link_user(
|
||||
self, user: models.User, credentials: models.Credentials
|
||||
) -> None:
|
||||
"""Add credentials to an existing user."""
|
||||
user.credentials.append(credentials)
|
||||
self._async_schedule_save()
|
||||
@@ -139,9 +144,12 @@ class AuthStore:
|
||||
self._async_schedule_save()
|
||||
|
||||
async def async_update_user(
|
||||
self, user: models.User, name: Optional[str] = None,
|
||||
is_active: Optional[bool] = None,
|
||||
group_ids: Optional[List[str]] = None) -> None:
|
||||
self,
|
||||
user: models.User,
|
||||
name: Optional[str] = None,
|
||||
is_active: Optional[bool] = None,
|
||||
group_ids: Optional[List[str]] = None,
|
||||
) -> None:
|
||||
"""Update a user."""
|
||||
assert self._groups is not None
|
||||
|
||||
@@ -156,10 +164,7 @@ class AuthStore:
|
||||
user.groups = groups
|
||||
user.invalidate_permission_cache()
|
||||
|
||||
for attr_name, value in (
|
||||
('name', name),
|
||||
('is_active', is_active),
|
||||
):
|
||||
for attr_name, value in (("name", name), ("is_active", is_active)):
|
||||
if value is not None:
|
||||
setattr(user, attr_name, value)
|
||||
|
||||
@@ -175,8 +180,7 @@ class AuthStore:
|
||||
user.is_active = False
|
||||
self._async_schedule_save()
|
||||
|
||||
async def async_remove_credentials(
|
||||
self, credentials: models.Credentials) -> None:
|
||||
async def async_remove_credentials(self, credentials: models.Credentials) -> None:
|
||||
"""Remove credentials."""
|
||||
if self._users is None:
|
||||
await self._async_load()
|
||||
@@ -197,23 +201,25 @@ class AuthStore:
|
||||
self._async_schedule_save()
|
||||
|
||||
async def async_create_refresh_token(
|
||||
self, user: models.User, client_id: Optional[str] = None,
|
||||
client_name: Optional[str] = None,
|
||||
client_icon: Optional[str] = None,
|
||||
token_type: str = models.TOKEN_TYPE_NORMAL,
|
||||
access_token_expiration: timedelta = ACCESS_TOKEN_EXPIRATION) \
|
||||
-> models.RefreshToken:
|
||||
self,
|
||||
user: models.User,
|
||||
client_id: Optional[str] = None,
|
||||
client_name: Optional[str] = None,
|
||||
client_icon: Optional[str] = None,
|
||||
token_type: str = models.TOKEN_TYPE_NORMAL,
|
||||
access_token_expiration: timedelta = ACCESS_TOKEN_EXPIRATION,
|
||||
) -> models.RefreshToken:
|
||||
"""Create a new token for a user."""
|
||||
kwargs = {
|
||||
'user': user,
|
||||
'client_id': client_id,
|
||||
'token_type': token_type,
|
||||
'access_token_expiration': access_token_expiration
|
||||
"user": user,
|
||||
"client_id": client_id,
|
||||
"token_type": token_type,
|
||||
"access_token_expiration": access_token_expiration,
|
||||
} # type: Dict[str, Any]
|
||||
if client_name:
|
||||
kwargs['client_name'] = client_name
|
||||
kwargs["client_name"] = client_name
|
||||
if client_icon:
|
||||
kwargs['client_icon'] = client_icon
|
||||
kwargs["client_icon"] = client_icon
|
||||
|
||||
refresh_token = models.RefreshToken(**kwargs)
|
||||
user.refresh_tokens[refresh_token.id] = refresh_token
|
||||
@@ -222,7 +228,8 @@ class AuthStore:
|
||||
return refresh_token
|
||||
|
||||
async def async_remove_refresh_token(
|
||||
self, refresh_token: models.RefreshToken) -> None:
|
||||
self, refresh_token: models.RefreshToken
|
||||
) -> None:
|
||||
"""Remove a refresh token."""
|
||||
if self._users is None:
|
||||
await self._async_load()
|
||||
@@ -234,7 +241,8 @@ class AuthStore:
|
||||
break
|
||||
|
||||
async def async_get_refresh_token(
|
||||
self, token_id: str) -> Optional[models.RefreshToken]:
|
||||
self, token_id: str
|
||||
) -> Optional[models.RefreshToken]:
|
||||
"""Get refresh token by id."""
|
||||
if self._users is None:
|
||||
await self._async_load()
|
||||
@@ -248,7 +256,8 @@ class AuthStore:
|
||||
return None
|
||||
|
||||
async def async_get_refresh_token_by_token(
|
||||
self, token: str) -> Optional[models.RefreshToken]:
|
||||
self, token: str
|
||||
) -> Optional[models.RefreshToken]:
|
||||
"""Get refresh token by token."""
|
||||
if self._users is None:
|
||||
await self._async_load()
|
||||
@@ -265,8 +274,8 @@ class AuthStore:
|
||||
|
||||
@callback
|
||||
def async_log_refresh_token_usage(
|
||||
self, refresh_token: models.RefreshToken,
|
||||
remote_ip: Optional[str] = None) -> None:
|
||||
self, refresh_token: models.RefreshToken, remote_ip: Optional[str] = None
|
||||
) -> None:
|
||||
"""Update refresh token last used information."""
|
||||
refresh_token.last_used_at = dt_util.utcnow()
|
||||
refresh_token.last_used_ip = remote_ip
|
||||
@@ -292,9 +301,7 @@ class AuthStore:
|
||||
if self._users is not None:
|
||||
return
|
||||
|
||||
self._perm_lookup = perm_lookup = PermissionLookup(
|
||||
ent_reg, dev_reg
|
||||
)
|
||||
self._perm_lookup = perm_lookup = PermissionLookup(ent_reg, dev_reg)
|
||||
|
||||
if data is None:
|
||||
self._set_defaults()
|
||||
@@ -317,24 +324,24 @@ class AuthStore:
|
||||
# prevents crashing if user rolls back HA version after a new property
|
||||
# was added.
|
||||
|
||||
for group_dict in data.get('groups', []):
|
||||
for group_dict in data.get("groups", []):
|
||||
policy = None # type: Optional[PolicyType]
|
||||
|
||||
if group_dict['id'] == GROUP_ID_ADMIN:
|
||||
if group_dict["id"] == GROUP_ID_ADMIN:
|
||||
has_admin_group = True
|
||||
|
||||
name = GROUP_NAME_ADMIN
|
||||
policy = system_policies.ADMIN_POLICY
|
||||
system_generated = True
|
||||
|
||||
elif group_dict['id'] == GROUP_ID_USER:
|
||||
elif group_dict["id"] == GROUP_ID_USER:
|
||||
has_user_group = True
|
||||
|
||||
name = GROUP_NAME_USER
|
||||
policy = system_policies.USER_POLICY
|
||||
system_generated = True
|
||||
|
||||
elif group_dict['id'] == GROUP_ID_READ_ONLY:
|
||||
elif group_dict["id"] == GROUP_ID_READ_ONLY:
|
||||
has_read_only_group = True
|
||||
|
||||
name = GROUP_NAME_READ_ONLY
|
||||
@@ -342,18 +349,18 @@ class AuthStore:
|
||||
system_generated = True
|
||||
|
||||
else:
|
||||
name = group_dict['name']
|
||||
policy = group_dict.get('policy')
|
||||
name = group_dict["name"]
|
||||
policy = group_dict.get("policy")
|
||||
system_generated = False
|
||||
|
||||
# We don't want groups without a policy that are not system groups
|
||||
# This is part of migrating from state 1
|
||||
if policy is None:
|
||||
group_without_policy = group_dict['id']
|
||||
group_without_policy = group_dict["id"]
|
||||
continue
|
||||
|
||||
groups[group_dict['id']] = models.Group(
|
||||
id=group_dict['id'],
|
||||
groups[group_dict["id"]] = models.Group(
|
||||
id=group_dict["id"],
|
||||
name=name,
|
||||
policy=policy,
|
||||
system_generated=system_generated,
|
||||
@@ -361,8 +368,7 @@ class AuthStore:
|
||||
|
||||
# If there are no groups, add all existing users to the admin group.
|
||||
# This is part of migrating from state 2
|
||||
migrate_users_to_admin_group = (not groups and
|
||||
group_without_policy is None)
|
||||
migrate_users_to_admin_group = not groups and group_without_policy is None
|
||||
|
||||
# If we find a no_policy_group, we need to migrate all users to the
|
||||
# admin group. We only do this if there are no other groups, as is
|
||||
@@ -385,82 +391,86 @@ class AuthStore:
|
||||
user_group = _system_user_group()
|
||||
groups[user_group.id] = user_group
|
||||
|
||||
for user_dict in data['users']:
|
||||
for user_dict in data["users"]:
|
||||
# Collect the users group.
|
||||
user_groups = []
|
||||
for group_id in user_dict.get('group_ids', []):
|
||||
for group_id in user_dict.get("group_ids", []):
|
||||
# This is part of migrating from state 1
|
||||
if group_id == group_without_policy:
|
||||
group_id = GROUP_ID_ADMIN
|
||||
user_groups.append(groups[group_id])
|
||||
|
||||
# This is part of migrating from state 2
|
||||
if (not user_dict['system_generated'] and
|
||||
migrate_users_to_admin_group):
|
||||
if not user_dict["system_generated"] and migrate_users_to_admin_group:
|
||||
user_groups.append(groups[GROUP_ID_ADMIN])
|
||||
|
||||
users[user_dict['id']] = models.User(
|
||||
name=user_dict['name'],
|
||||
users[user_dict["id"]] = models.User(
|
||||
name=user_dict["name"],
|
||||
groups=user_groups,
|
||||
id=user_dict['id'],
|
||||
is_owner=user_dict['is_owner'],
|
||||
is_active=user_dict['is_active'],
|
||||
system_generated=user_dict['system_generated'],
|
||||
id=user_dict["id"],
|
||||
is_owner=user_dict["is_owner"],
|
||||
is_active=user_dict["is_active"],
|
||||
system_generated=user_dict["system_generated"],
|
||||
perm_lookup=perm_lookup,
|
||||
)
|
||||
|
||||
for cred_dict in data['credentials']:
|
||||
users[cred_dict['user_id']].credentials.append(models.Credentials(
|
||||
id=cred_dict['id'],
|
||||
is_new=False,
|
||||
auth_provider_type=cred_dict['auth_provider_type'],
|
||||
auth_provider_id=cred_dict['auth_provider_id'],
|
||||
data=cred_dict['data'],
|
||||
))
|
||||
for cred_dict in data["credentials"]:
|
||||
users[cred_dict["user_id"]].credentials.append(
|
||||
models.Credentials(
|
||||
id=cred_dict["id"],
|
||||
is_new=False,
|
||||
auth_provider_type=cred_dict["auth_provider_type"],
|
||||
auth_provider_id=cred_dict["auth_provider_id"],
|
||||
data=cred_dict["data"],
|
||||
)
|
||||
)
|
||||
|
||||
for rt_dict in data['refresh_tokens']:
|
||||
for rt_dict in data["refresh_tokens"]:
|
||||
# Filter out the old keys that don't have jwt_key (pre-0.76)
|
||||
if 'jwt_key' not in rt_dict:
|
||||
if "jwt_key" not in rt_dict:
|
||||
continue
|
||||
|
||||
created_at = dt_util.parse_datetime(rt_dict['created_at'])
|
||||
created_at = dt_util.parse_datetime(rt_dict["created_at"])
|
||||
if created_at is None:
|
||||
getLogger(__name__).error(
|
||||
'Ignoring refresh token %(id)s with invalid created_at '
|
||||
'%(created_at)s for user_id %(user_id)s', rt_dict)
|
||||
"Ignoring refresh token %(id)s with invalid created_at "
|
||||
"%(created_at)s for user_id %(user_id)s",
|
||||
rt_dict,
|
||||
)
|
||||
continue
|
||||
|
||||
token_type = rt_dict.get('token_type')
|
||||
token_type = rt_dict.get("token_type")
|
||||
if token_type is None:
|
||||
if rt_dict['client_id'] is None:
|
||||
if rt_dict["client_id"] is None:
|
||||
token_type = models.TOKEN_TYPE_SYSTEM
|
||||
else:
|
||||
token_type = models.TOKEN_TYPE_NORMAL
|
||||
|
||||
# old refresh_token don't have last_used_at (pre-0.78)
|
||||
last_used_at_str = rt_dict.get('last_used_at')
|
||||
last_used_at_str = rt_dict.get("last_used_at")
|
||||
if last_used_at_str:
|
||||
last_used_at = dt_util.parse_datetime(last_used_at_str)
|
||||
else:
|
||||
last_used_at = None
|
||||
|
||||
token = models.RefreshToken(
|
||||
id=rt_dict['id'],
|
||||
user=users[rt_dict['user_id']],
|
||||
client_id=rt_dict['client_id'],
|
||||
id=rt_dict["id"],
|
||||
user=users[rt_dict["user_id"]],
|
||||
client_id=rt_dict["client_id"],
|
||||
# use dict.get to keep backward compatibility
|
||||
client_name=rt_dict.get('client_name'),
|
||||
client_icon=rt_dict.get('client_icon'),
|
||||
client_name=rt_dict.get("client_name"),
|
||||
client_icon=rt_dict.get("client_icon"),
|
||||
token_type=token_type,
|
||||
created_at=created_at,
|
||||
access_token_expiration=timedelta(
|
||||
seconds=rt_dict['access_token_expiration']),
|
||||
token=rt_dict['token'],
|
||||
jwt_key=rt_dict['jwt_key'],
|
||||
seconds=rt_dict["access_token_expiration"]
|
||||
),
|
||||
token=rt_dict["token"],
|
||||
jwt_key=rt_dict["jwt_key"],
|
||||
last_used_at=last_used_at,
|
||||
last_used_ip=rt_dict.get('last_used_ip'),
|
||||
last_used_ip=rt_dict.get("last_used_ip"),
|
||||
)
|
||||
users[rt_dict['user_id']].refresh_tokens[token.id] = token
|
||||
users[rt_dict["user_id"]].refresh_tokens[token.id] = token
|
||||
|
||||
self._groups = groups
|
||||
self._users = users
|
||||
@@ -481,12 +491,12 @@ class AuthStore:
|
||||
|
||||
users = [
|
||||
{
|
||||
'id': user.id,
|
||||
'group_ids': [group.id for group in user.groups],
|
||||
'is_owner': user.is_owner,
|
||||
'is_active': user.is_active,
|
||||
'name': user.name,
|
||||
'system_generated': user.system_generated,
|
||||
"id": user.id,
|
||||
"group_ids": [group.id for group in user.groups],
|
||||
"is_owner": user.is_owner,
|
||||
"is_active": user.is_active,
|
||||
"name": user.name,
|
||||
"system_generated": user.system_generated,
|
||||
}
|
||||
for user in self._users.values()
|
||||
]
|
||||
@@ -494,23 +504,23 @@ class AuthStore:
|
||||
groups = []
|
||||
for group in self._groups.values():
|
||||
g_dict = {
|
||||
'id': group.id,
|
||||
"id": group.id,
|
||||
# Name not read for sys groups. Kept here for backwards compat
|
||||
'name': group.name
|
||||
"name": group.name,
|
||||
} # type: Dict[str, Any]
|
||||
|
||||
if not group.system_generated:
|
||||
g_dict['policy'] = group.policy
|
||||
g_dict["policy"] = group.policy
|
||||
|
||||
groups.append(g_dict)
|
||||
|
||||
credentials = [
|
||||
{
|
||||
'id': credential.id,
|
||||
'user_id': user.id,
|
||||
'auth_provider_type': credential.auth_provider_type,
|
||||
'auth_provider_id': credential.auth_provider_id,
|
||||
'data': credential.data,
|
||||
"id": credential.id,
|
||||
"user_id": user.id,
|
||||
"auth_provider_type": credential.auth_provider_type,
|
||||
"auth_provider_id": credential.auth_provider_id,
|
||||
"data": credential.data,
|
||||
}
|
||||
for user in self._users.values()
|
||||
for credential in user.credentials
|
||||
@@ -518,36 +528,35 @@ class AuthStore:
|
||||
|
||||
refresh_tokens = [
|
||||
{
|
||||
'id': refresh_token.id,
|
||||
'user_id': user.id,
|
||||
'client_id': refresh_token.client_id,
|
||||
'client_name': refresh_token.client_name,
|
||||
'client_icon': refresh_token.client_icon,
|
||||
'token_type': refresh_token.token_type,
|
||||
'created_at': refresh_token.created_at.isoformat(),
|
||||
'access_token_expiration':
|
||||
refresh_token.access_token_expiration.total_seconds(),
|
||||
'token': refresh_token.token,
|
||||
'jwt_key': refresh_token.jwt_key,
|
||||
'last_used_at':
|
||||
refresh_token.last_used_at.isoformat()
|
||||
if refresh_token.last_used_at else None,
|
||||
'last_used_ip': refresh_token.last_used_ip,
|
||||
"id": refresh_token.id,
|
||||
"user_id": user.id,
|
||||
"client_id": refresh_token.client_id,
|
||||
"client_name": refresh_token.client_name,
|
||||
"client_icon": refresh_token.client_icon,
|
||||
"token_type": refresh_token.token_type,
|
||||
"created_at": refresh_token.created_at.isoformat(),
|
||||
"access_token_expiration": refresh_token.access_token_expiration.total_seconds(),
|
||||
"token": refresh_token.token,
|
||||
"jwt_key": refresh_token.jwt_key,
|
||||
"last_used_at": refresh_token.last_used_at.isoformat()
|
||||
if refresh_token.last_used_at
|
||||
else None,
|
||||
"last_used_ip": refresh_token.last_used_ip,
|
||||
}
|
||||
for user in self._users.values()
|
||||
for refresh_token in user.refresh_tokens.values()
|
||||
]
|
||||
|
||||
return {
|
||||
'users': users,
|
||||
'groups': groups,
|
||||
'credentials': credentials,
|
||||
'refresh_tokens': refresh_tokens,
|
||||
"users": users,
|
||||
"groups": groups,
|
||||
"credentials": credentials,
|
||||
"refresh_tokens": refresh_tokens,
|
||||
}
|
||||
|
||||
def _set_defaults(self) -> None:
|
||||
"""Set default values for auth store."""
|
||||
self._users = OrderedDict() # type: Dict[str, models.User]
|
||||
self._users = OrderedDict()
|
||||
|
||||
groups = OrderedDict() # type: Dict[str, models.Group]
|
||||
admin_group = _system_admin_group()
|
||||
|
||||
@@ -4,6 +4,6 @@ from datetime import timedelta
|
||||
ACCESS_TOKEN_EXPIRATION = timedelta(minutes=30)
|
||||
MFA_SESSION_EXPIRATION = timedelta(minutes=5)
|
||||
|
||||
GROUP_ID_ADMIN = 'system-admin'
|
||||
GROUP_ID_USER = 'system-users'
|
||||
GROUP_ID_READ_ONLY = 'system-read-only'
|
||||
GROUP_ID_ADMIN = "system-admin"
|
||||
GROUP_ID_USER = "system-users"
|
||||
GROUP_ID_READ_ONLY = "system-read-only"
|
||||
|
||||
@@ -15,14 +15,17 @@ from homeassistant.util.decorator import Registry
|
||||
|
||||
MULTI_FACTOR_AUTH_MODULES = Registry()
|
||||
|
||||
MULTI_FACTOR_AUTH_MODULE_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_TYPE): str,
|
||||
vol.Optional(CONF_NAME): str,
|
||||
# Specify ID if you have two mfa auth module for same type.
|
||||
vol.Optional(CONF_ID): str,
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
MULTI_FACTOR_AUTH_MODULE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_TYPE): str,
|
||||
vol.Optional(CONF_NAME): str,
|
||||
# Specify ID if you have two mfa auth module for same type.
|
||||
vol.Optional(CONF_ID): str,
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
DATA_REQS = 'mfa_auth_module_reqs_processed'
|
||||
DATA_REQS = "mfa_auth_module_reqs_processed"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -30,7 +33,7 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class MultiFactorAuthModule:
|
||||
"""Multi-factor Auth Module of validation function."""
|
||||
|
||||
DEFAULT_TITLE = 'Unnamed auth module'
|
||||
DEFAULT_TITLE = "Unnamed auth module"
|
||||
MAX_RETRY_TIME = 3
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: Dict[str, Any]) -> None:
|
||||
@@ -63,7 +66,7 @@ class MultiFactorAuthModule:
|
||||
"""Return a voluptuous schema to define mfa auth module's input."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_setup_flow(self, user_id: str) -> 'SetupFlow':
|
||||
async def async_setup_flow(self, user_id: str) -> "SetupFlow":
|
||||
"""Return a data entry flow handler for setup module.
|
||||
|
||||
Mfa module should extend SetupFlow
|
||||
@@ -82,8 +85,7 @@ class MultiFactorAuthModule:
|
||||
"""Return whether user is setup."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_validate(
|
||||
self, user_id: str, user_input: Dict[str, Any]) -> bool:
|
||||
async def async_validate(self, user_id: str, user_input: Dict[str, Any]) -> bool:
|
||||
"""Return True if validation passed."""
|
||||
raise NotImplementedError
|
||||
|
||||
@@ -91,17 +93,17 @@ class MultiFactorAuthModule:
|
||||
class SetupFlow(data_entry_flow.FlowHandler):
|
||||
"""Handler for the setup flow."""
|
||||
|
||||
def __init__(self, auth_module: MultiFactorAuthModule,
|
||||
setup_schema: vol.Schema,
|
||||
user_id: str) -> None:
|
||||
def __init__(
|
||||
self, auth_module: MultiFactorAuthModule, setup_schema: vol.Schema, user_id: str
|
||||
) -> None:
|
||||
"""Initialize the setup flow."""
|
||||
self._auth_module = auth_module
|
||||
self._setup_schema = setup_schema
|
||||
self._user_id = user_id
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: Optional[Dict[str, str]] = None) \
|
||||
-> Dict[str, Any]:
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle the first step of setup flow.
|
||||
|
||||
Return self.async_show_form(step_id='init') if user_input is None.
|
||||
@@ -110,23 +112,19 @@ class SetupFlow(data_entry_flow.FlowHandler):
|
||||
errors = {} # type: Dict[str, str]
|
||||
|
||||
if user_input:
|
||||
result = await self._auth_module.async_setup_user(
|
||||
self._user_id, user_input)
|
||||
result = await self._auth_module.async_setup_user(self._user_id, user_input)
|
||||
return self.async_create_entry(
|
||||
title=self._auth_module.name,
|
||||
data={'result': result}
|
||||
title=self._auth_module.name, data={"result": result}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id='init',
|
||||
data_schema=self._setup_schema,
|
||||
errors=errors
|
||||
step_id="init", data_schema=self._setup_schema, errors=errors
|
||||
)
|
||||
|
||||
|
||||
async def auth_mfa_module_from_config(
|
||||
hass: HomeAssistant, config: Dict[str, Any]) \
|
||||
-> MultiFactorAuthModule:
|
||||
hass: HomeAssistant, config: Dict[str, Any]
|
||||
) -> MultiFactorAuthModule:
|
||||
"""Initialize an auth module from a config."""
|
||||
module_name = config[CONF_TYPE]
|
||||
module = await _load_mfa_module(hass, module_name)
|
||||
@@ -134,26 +132,29 @@ async def auth_mfa_module_from_config(
|
||||
try:
|
||||
config = module.CONFIG_SCHEMA(config) # type: ignore
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error('Invalid configuration for multi-factor module %s: %s',
|
||||
module_name, humanize_error(config, err))
|
||||
_LOGGER.error(
|
||||
"Invalid configuration for multi-factor module %s: %s",
|
||||
module_name,
|
||||
humanize_error(config, err),
|
||||
)
|
||||
raise
|
||||
|
||||
return MULTI_FACTOR_AUTH_MODULES[module_name](hass, config) # type: ignore
|
||||
|
||||
|
||||
async def _load_mfa_module(hass: HomeAssistant, module_name: str) \
|
||||
-> types.ModuleType:
|
||||
async def _load_mfa_module(hass: HomeAssistant, module_name: str) -> types.ModuleType:
|
||||
"""Load an mfa auth module."""
|
||||
module_path = 'homeassistant.auth.mfa_modules.{}'.format(module_name)
|
||||
module_path = "homeassistant.auth.mfa_modules.{}".format(module_name)
|
||||
|
||||
try:
|
||||
module = importlib.import_module(module_path)
|
||||
except ImportError as err:
|
||||
_LOGGER.error('Unable to load mfa module %s: %s', module_name, err)
|
||||
raise HomeAssistantError('Unable to load mfa module {}: {}'.format(
|
||||
module_name, err))
|
||||
_LOGGER.error("Unable to load mfa module %s: %s", module_name, err)
|
||||
raise HomeAssistantError(
|
||||
"Unable to load mfa module {}: {}".format(module_name, err)
|
||||
)
|
||||
|
||||
if hass.config.skip_pip or not hasattr(module, 'REQUIREMENTS'):
|
||||
if hass.config.skip_pip or not hasattr(module, "REQUIREMENTS"):
|
||||
return module
|
||||
|
||||
processed = hass.data.get(DATA_REQS)
|
||||
@@ -164,12 +165,13 @@ async def _load_mfa_module(hass: HomeAssistant, module_name: str) \
|
||||
|
||||
# https://github.com/python/mypy/issues/1424
|
||||
req_success = await requirements.async_process_requirements(
|
||||
hass, module_path, module.REQUIREMENTS) # type: ignore
|
||||
hass, module_path, module.REQUIREMENTS # type: ignore
|
||||
)
|
||||
|
||||
if not req_success:
|
||||
raise HomeAssistantError(
|
||||
'Unable to process requirements of mfa module {}'.format(
|
||||
module_name))
|
||||
"Unable to process requirements of mfa module {}".format(module_name)
|
||||
)
|
||||
|
||||
processed.add(module_name)
|
||||
return module
|
||||
|
||||
@@ -6,39 +6,45 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import MultiFactorAuthModule, MULTI_FACTOR_AUTH_MODULES, \
|
||||
MULTI_FACTOR_AUTH_MODULE_SCHEMA, SetupFlow
|
||||
from . import (
|
||||
MultiFactorAuthModule,
|
||||
MULTI_FACTOR_AUTH_MODULES,
|
||||
MULTI_FACTOR_AUTH_MODULE_SCHEMA,
|
||||
SetupFlow,
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({
|
||||
vol.Required('data'): [vol.Schema({
|
||||
vol.Required('user_id'): str,
|
||||
vol.Required('pin'): str,
|
||||
})]
|
||||
}, extra=vol.PREVENT_EXTRA)
|
||||
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend(
|
||||
{
|
||||
vol.Required("data"): [
|
||||
vol.Schema({vol.Required("user_id"): str, vol.Required("pin"): str})
|
||||
]
|
||||
},
|
||||
extra=vol.PREVENT_EXTRA,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@MULTI_FACTOR_AUTH_MODULES.register('insecure_example')
|
||||
@MULTI_FACTOR_AUTH_MODULES.register("insecure_example")
|
||||
class InsecureExampleModule(MultiFactorAuthModule):
|
||||
"""Example auth module validate pin."""
|
||||
|
||||
DEFAULT_TITLE = 'Insecure Personal Identify Number'
|
||||
DEFAULT_TITLE = "Insecure Personal Identify Number"
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: Dict[str, Any]) -> None:
|
||||
"""Initialize the user data store."""
|
||||
super().__init__(hass, config)
|
||||
self._data = config['data']
|
||||
self._data = config["data"]
|
||||
|
||||
@property
|
||||
def input_schema(self) -> vol.Schema:
|
||||
"""Validate login flow input data."""
|
||||
return vol.Schema({'pin': str})
|
||||
return vol.Schema({"pin": str})
|
||||
|
||||
@property
|
||||
def setup_schema(self) -> vol.Schema:
|
||||
"""Validate async_setup_user input data."""
|
||||
return vol.Schema({'pin': str})
|
||||
return vol.Schema({"pin": str})
|
||||
|
||||
async def async_setup_flow(self, user_id: str) -> SetupFlow:
|
||||
"""Return a data entry flow handler for setup module.
|
||||
@@ -50,21 +56,21 @@ class InsecureExampleModule(MultiFactorAuthModule):
|
||||
async def async_setup_user(self, user_id: str, setup_data: Any) -> Any:
|
||||
"""Set up user to use mfa module."""
|
||||
# data shall has been validate in caller
|
||||
pin = setup_data['pin']
|
||||
pin = setup_data["pin"]
|
||||
|
||||
for data in self._data:
|
||||
if data['user_id'] == user_id:
|
||||
if data["user_id"] == user_id:
|
||||
# already setup, override
|
||||
data['pin'] = pin
|
||||
data["pin"] = pin
|
||||
return
|
||||
|
||||
self._data.append({'user_id': user_id, 'pin': pin})
|
||||
self._data.append({"user_id": user_id, "pin": pin})
|
||||
|
||||
async def async_depose_user(self, user_id: str) -> None:
|
||||
"""Remove user from mfa module."""
|
||||
found = None
|
||||
for data in self._data:
|
||||
if data['user_id'] == user_id:
|
||||
if data["user_id"] == user_id:
|
||||
found = data
|
||||
break
|
||||
if found:
|
||||
@@ -73,17 +79,16 @@ class InsecureExampleModule(MultiFactorAuthModule):
|
||||
async def async_is_user_setup(self, user_id: str) -> bool:
|
||||
"""Return whether user is setup."""
|
||||
for data in self._data:
|
||||
if data['user_id'] == user_id:
|
||||
if data["user_id"] == user_id:
|
||||
return True
|
||||
return False
|
||||
|
||||
async def async_validate(
|
||||
self, user_id: str, user_input: Dict[str, Any]) -> bool:
|
||||
async def async_validate(self, user_id: str, user_input: Dict[str, Any]) -> bool:
|
||||
"""Return True if validation passed."""
|
||||
for data in self._data:
|
||||
if data['user_id'] == user_id:
|
||||
if data["user_id"] == user_id:
|
||||
# user_input has been validate in caller
|
||||
if data['pin'] == user_input['pin']:
|
||||
if data["pin"] == user_input["pin"]:
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
@@ -15,26 +15,32 @@ from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ServiceNotFound
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
from . import MultiFactorAuthModule, MULTI_FACTOR_AUTH_MODULES, \
|
||||
MULTI_FACTOR_AUTH_MODULE_SCHEMA, SetupFlow
|
||||
from . import (
|
||||
MultiFactorAuthModule,
|
||||
MULTI_FACTOR_AUTH_MODULES,
|
||||
MULTI_FACTOR_AUTH_MODULE_SCHEMA,
|
||||
SetupFlow,
|
||||
)
|
||||
|
||||
REQUIREMENTS = ['pyotp==2.2.7']
|
||||
REQUIREMENTS = ["pyotp==2.2.7"]
|
||||
|
||||
CONF_MESSAGE = 'message'
|
||||
CONF_MESSAGE = "message"
|
||||
|
||||
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({
|
||||
vol.Optional(CONF_INCLUDE): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(CONF_EXCLUDE): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(CONF_MESSAGE,
|
||||
default='{} is your Home Assistant login code'): str
|
||||
}, extra=vol.PREVENT_EXTRA)
|
||||
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_INCLUDE): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(CONF_EXCLUDE): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(CONF_MESSAGE, default="{} is your Home Assistant login code"): str,
|
||||
},
|
||||
extra=vol.PREVENT_EXTRA,
|
||||
)
|
||||
|
||||
STORAGE_VERSION = 1
|
||||
STORAGE_KEY = 'auth_module.notify'
|
||||
STORAGE_USERS = 'users'
|
||||
STORAGE_USER_ID = 'user_id'
|
||||
STORAGE_KEY = "auth_module.notify"
|
||||
STORAGE_USERS = "users"
|
||||
STORAGE_USER_ID = "user_id"
|
||||
|
||||
INPUT_FIELD_CODE = 'code'
|
||||
INPUT_FIELD_CODE = "code"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -42,24 +48,28 @@ _LOGGER = logging.getLogger(__name__)
|
||||
def _generate_secret() -> str:
|
||||
"""Generate a secret."""
|
||||
import pyotp
|
||||
|
||||
return str(pyotp.random_base32())
|
||||
|
||||
|
||||
def _generate_random() -> int:
|
||||
"""Generate a 8 digit number."""
|
||||
import pyotp
|
||||
return int(pyotp.random_base32(length=8, chars=list('1234567890')))
|
||||
|
||||
return int(pyotp.random_base32(length=8, chars=list("1234567890")))
|
||||
|
||||
|
||||
def _generate_otp(secret: str, count: int) -> str:
|
||||
"""Generate one time password."""
|
||||
import pyotp
|
||||
|
||||
return str(pyotp.HOTP(secret).at(count))
|
||||
|
||||
|
||||
def _verify_otp(secret: str, otp: str, count: int) -> bool:
|
||||
"""Verify one time password."""
|
||||
import pyotp
|
||||
|
||||
return bool(pyotp.HOTP(secret).verify(otp, count))
|
||||
|
||||
|
||||
@@ -67,7 +77,7 @@ def _verify_otp(secret: str, otp: str, count: int) -> bool:
|
||||
class NotifySetting:
|
||||
"""Store notify setting for one user."""
|
||||
|
||||
secret = attr.ib(type=str, factory=_generate_secret) # not persistent
|
||||
secret = attr.ib(type=str, factory=_generate_secret) # not persistent
|
||||
counter = attr.ib(type=int, factory=_generate_random) # not persistent
|
||||
notify_service = attr.ib(type=Optional[str], default=None)
|
||||
target = attr.ib(type=Optional[str], default=None)
|
||||
@@ -76,18 +86,19 @@ class NotifySetting:
|
||||
_UsersDict = Dict[str, NotifySetting]
|
||||
|
||||
|
||||
@MULTI_FACTOR_AUTH_MODULES.register('notify')
|
||||
@MULTI_FACTOR_AUTH_MODULES.register("notify")
|
||||
class NotifyAuthModule(MultiFactorAuthModule):
|
||||
"""Auth module send hmac-based one time password by notify service."""
|
||||
|
||||
DEFAULT_TITLE = 'Notify One-Time Password'
|
||||
DEFAULT_TITLE = "Notify One-Time Password"
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: Dict[str, Any]) -> None:
|
||||
"""Initialize the user data store."""
|
||||
super().__init__(hass, config)
|
||||
self._user_settings = None # type: Optional[_UsersDict]
|
||||
self._user_store = hass.helpers.storage.Store(
|
||||
STORAGE_VERSION, STORAGE_KEY, private=True)
|
||||
STORAGE_VERSION, STORAGE_KEY, private=True
|
||||
)
|
||||
self._include = config.get(CONF_INCLUDE, [])
|
||||
self._exclude = config.get(CONF_EXCLUDE, [])
|
||||
self._message_template = config[CONF_MESSAGE]
|
||||
@@ -119,22 +130,27 @@ class NotifyAuthModule(MultiFactorAuthModule):
|
||||
if self._user_settings is None:
|
||||
return
|
||||
|
||||
await self._user_store.async_save({STORAGE_USERS: {
|
||||
user_id: attr.asdict(
|
||||
notify_setting, filter=attr.filters.exclude(
|
||||
attr.fields(NotifySetting).secret,
|
||||
attr.fields(NotifySetting).counter,
|
||||
))
|
||||
for user_id, notify_setting
|
||||
in self._user_settings.items()
|
||||
}})
|
||||
await self._user_store.async_save(
|
||||
{
|
||||
STORAGE_USERS: {
|
||||
user_id: attr.asdict(
|
||||
notify_setting,
|
||||
filter=attr.filters.exclude(
|
||||
attr.fields(NotifySetting).secret,
|
||||
attr.fields(NotifySetting).counter,
|
||||
),
|
||||
)
|
||||
for user_id, notify_setting in self._user_settings.items()
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
@callback
|
||||
def aync_get_available_notify_services(self) -> List[str]:
|
||||
"""Return list of notify services."""
|
||||
unordered_services = set()
|
||||
|
||||
for service in self.hass.services.async_services().get('notify', {}):
|
||||
for service in self.hass.services.async_services().get("notify", {}):
|
||||
if service not in self._exclude:
|
||||
unordered_services.add(service)
|
||||
|
||||
@@ -149,8 +165,8 @@ class NotifyAuthModule(MultiFactorAuthModule):
|
||||
Mfa module should extend SetupFlow
|
||||
"""
|
||||
return NotifySetupFlow(
|
||||
self, self.input_schema, user_id,
|
||||
self.aync_get_available_notify_services())
|
||||
self, self.input_schema, user_id, self.aync_get_available_notify_services()
|
||||
)
|
||||
|
||||
async def async_setup_user(self, user_id: str, setup_data: Any) -> Any:
|
||||
"""Set up auth module for user."""
|
||||
@@ -159,8 +175,8 @@ class NotifyAuthModule(MultiFactorAuthModule):
|
||||
assert self._user_settings is not None
|
||||
|
||||
self._user_settings[user_id] = NotifySetting(
|
||||
notify_service=setup_data.get('notify_service'),
|
||||
target=setup_data.get('target'),
|
||||
notify_service=setup_data.get("notify_service"),
|
||||
target=setup_data.get("target"),
|
||||
)
|
||||
|
||||
await self._async_save()
|
||||
@@ -182,8 +198,7 @@ class NotifyAuthModule(MultiFactorAuthModule):
|
||||
|
||||
return user_id in self._user_settings
|
||||
|
||||
async def async_validate(
|
||||
self, user_id: str, user_input: Dict[str, Any]) -> bool:
|
||||
async def async_validate(self, user_id: str, user_input: Dict[str, Any]) -> bool:
|
||||
"""Return True if validation passed."""
|
||||
if self._user_settings is None:
|
||||
await self._async_load()
|
||||
@@ -195,9 +210,11 @@ class NotifyAuthModule(MultiFactorAuthModule):
|
||||
|
||||
# user_input has been validate in caller
|
||||
return await self.hass.async_add_executor_job(
|
||||
_verify_otp, notify_setting.secret,
|
||||
user_input.get(INPUT_FIELD_CODE, ''),
|
||||
notify_setting.counter)
|
||||
_verify_otp,
|
||||
notify_setting.secret,
|
||||
user_input.get(INPUT_FIELD_CODE, ""),
|
||||
notify_setting.counter,
|
||||
)
|
||||
|
||||
async def async_initialize_login_mfa_step(self, user_id: str) -> None:
|
||||
"""Generate code and notify user."""
|
||||
@@ -207,7 +224,7 @@ class NotifyAuthModule(MultiFactorAuthModule):
|
||||
|
||||
notify_setting = self._user_settings.get(user_id, None)
|
||||
if notify_setting is None:
|
||||
raise ValueError('Cannot find user_id')
|
||||
raise ValueError("Cannot find user_id")
|
||||
|
||||
def generate_secret_and_one_time_password() -> str:
|
||||
"""Generate and send one time password."""
|
||||
@@ -215,11 +232,11 @@ class NotifyAuthModule(MultiFactorAuthModule):
|
||||
# secret and counter are not persistent
|
||||
notify_setting.secret = _generate_secret()
|
||||
notify_setting.counter = _generate_random()
|
||||
return _generate_otp(
|
||||
notify_setting.secret, notify_setting.counter)
|
||||
return _generate_otp(notify_setting.secret, notify_setting.counter)
|
||||
|
||||
code = await self.hass.async_add_executor_job(
|
||||
generate_secret_and_one_time_password)
|
||||
generate_secret_and_one_time_password
|
||||
)
|
||||
|
||||
await self.async_notify_user(user_id, code)
|
||||
|
||||
@@ -231,105 +248,107 @@ class NotifyAuthModule(MultiFactorAuthModule):
|
||||
|
||||
notify_setting = self._user_settings.get(user_id, None)
|
||||
if notify_setting is None:
|
||||
_LOGGER.error('Cannot find user %s', user_id)
|
||||
_LOGGER.error("Cannot find user %s", user_id)
|
||||
return
|
||||
|
||||
await self.async_notify( # type: ignore
|
||||
code, notify_setting.notify_service, notify_setting.target)
|
||||
await self.async_notify( # type: ignore
|
||||
code, notify_setting.notify_service, notify_setting.target
|
||||
)
|
||||
|
||||
async def async_notify(self, code: str, notify_service: str,
|
||||
target: Optional[str] = None) -> None:
|
||||
async def async_notify(
|
||||
self, code: str, notify_service: str, target: Optional[str] = None
|
||||
) -> None:
|
||||
"""Send code by notify service."""
|
||||
data = {'message': self._message_template.format(code)}
|
||||
data = {"message": self._message_template.format(code)}
|
||||
if target:
|
||||
data['target'] = [target]
|
||||
data["target"] = [target]
|
||||
|
||||
await self.hass.services.async_call('notify', notify_service, data)
|
||||
await self.hass.services.async_call("notify", notify_service, data)
|
||||
|
||||
|
||||
class NotifySetupFlow(SetupFlow):
|
||||
"""Handler for the setup flow."""
|
||||
|
||||
def __init__(self, auth_module: NotifyAuthModule,
|
||||
setup_schema: vol.Schema,
|
||||
user_id: str,
|
||||
available_notify_services: List[str]) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
auth_module: NotifyAuthModule,
|
||||
setup_schema: vol.Schema,
|
||||
user_id: str,
|
||||
available_notify_services: List[str],
|
||||
) -> None:
|
||||
"""Initialize the setup flow."""
|
||||
super().__init__(auth_module, setup_schema, user_id)
|
||||
# to fix typing complaint
|
||||
self._auth_module = auth_module # type: NotifyAuthModule
|
||||
self._available_notify_services = available_notify_services
|
||||
self._secret = None # type: Optional[str]
|
||||
self._count = None # type: Optional[int]
|
||||
self._count = None # type: Optional[int]
|
||||
self._notify_service = None # type: Optional[str]
|
||||
self._target = None # type: Optional[str]
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: Optional[Dict[str, str]] = None) \
|
||||
-> Dict[str, Any]:
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Let user select available notify services."""
|
||||
errors = {} # type: Dict[str, str]
|
||||
|
||||
hass = self._auth_module.hass
|
||||
if user_input:
|
||||
self._notify_service = user_input['notify_service']
|
||||
self._target = user_input.get('target')
|
||||
self._notify_service = user_input["notify_service"]
|
||||
self._target = user_input.get("target")
|
||||
self._secret = await hass.async_add_executor_job(_generate_secret)
|
||||
self._count = await hass.async_add_executor_job(_generate_random)
|
||||
|
||||
return await self.async_step_setup()
|
||||
|
||||
if not self._available_notify_services:
|
||||
return self.async_abort(reason='no_available_service')
|
||||
return self.async_abort(reason="no_available_service")
|
||||
|
||||
schema = OrderedDict() # type: Dict[str, Any]
|
||||
schema['notify_service'] = vol.In(self._available_notify_services)
|
||||
schema['target'] = vol.Optional(str)
|
||||
schema["notify_service"] = vol.In(self._available_notify_services)
|
||||
schema["target"] = vol.Optional(str)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id='init',
|
||||
data_schema=vol.Schema(schema),
|
||||
errors=errors
|
||||
step_id="init", data_schema=vol.Schema(schema), errors=errors
|
||||
)
|
||||
|
||||
async def async_step_setup(
|
||||
self, user_input: Optional[Dict[str, str]] = None) \
|
||||
-> Dict[str, Any]:
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Verify user can recevie one-time password."""
|
||||
errors = {} # type: Dict[str, str]
|
||||
|
||||
hass = self._auth_module.hass
|
||||
if user_input:
|
||||
verified = await hass.async_add_executor_job(
|
||||
_verify_otp, self._secret, user_input['code'], self._count)
|
||||
_verify_otp, self._secret, user_input["code"], self._count
|
||||
)
|
||||
if verified:
|
||||
await self._auth_module.async_setup_user(
|
||||
self._user_id, {
|
||||
'notify_service': self._notify_service,
|
||||
'target': self._target,
|
||||
})
|
||||
return self.async_create_entry(
|
||||
title=self._auth_module.name,
|
||||
data={}
|
||||
self._user_id,
|
||||
{"notify_service": self._notify_service, "target": self._target},
|
||||
)
|
||||
return self.async_create_entry(title=self._auth_module.name, data={})
|
||||
|
||||
errors['base'] = 'invalid_code'
|
||||
errors["base"] = "invalid_code"
|
||||
|
||||
# generate code every time, no retry logic
|
||||
assert self._secret and self._count
|
||||
code = await hass.async_add_executor_job(
|
||||
_generate_otp, self._secret, self._count)
|
||||
_generate_otp, self._secret, self._count
|
||||
)
|
||||
|
||||
assert self._notify_service
|
||||
try:
|
||||
await self._auth_module.async_notify(
|
||||
code, self._notify_service, self._target)
|
||||
code, self._notify_service, self._target
|
||||
)
|
||||
except ServiceNotFound:
|
||||
return self.async_abort(reason='notify_service_not_exist')
|
||||
return self.async_abort(reason="notify_service_not_exist")
|
||||
|
||||
return self.async_show_form(
|
||||
step_id='setup',
|
||||
step_id="setup",
|
||||
data_schema=self._setup_schema,
|
||||
description_placeholders={'notify_service': self._notify_service},
|
||||
description_placeholders={"notify_service": self._notify_service},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@@ -9,23 +9,26 @@ import voluptuous as vol
|
||||
from homeassistant.auth.models import User
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import MultiFactorAuthModule, MULTI_FACTOR_AUTH_MODULES, \
|
||||
MULTI_FACTOR_AUTH_MODULE_SCHEMA, SetupFlow
|
||||
from . import (
|
||||
MultiFactorAuthModule,
|
||||
MULTI_FACTOR_AUTH_MODULES,
|
||||
MULTI_FACTOR_AUTH_MODULE_SCHEMA,
|
||||
SetupFlow,
|
||||
)
|
||||
|
||||
REQUIREMENTS = ['pyotp==2.2.7', 'PyQRCode==1.2.1']
|
||||
REQUIREMENTS = ["pyotp==2.2.7", "PyQRCode==1.2.1"]
|
||||
|
||||
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({
|
||||
}, extra=vol.PREVENT_EXTRA)
|
||||
CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA)
|
||||
|
||||
STORAGE_VERSION = 1
|
||||
STORAGE_KEY = 'auth_module.totp'
|
||||
STORAGE_USERS = 'users'
|
||||
STORAGE_USER_ID = 'user_id'
|
||||
STORAGE_OTA_SECRET = 'ota_secret'
|
||||
STORAGE_KEY = "auth_module.totp"
|
||||
STORAGE_USERS = "users"
|
||||
STORAGE_USER_ID = "user_id"
|
||||
STORAGE_OTA_SECRET = "ota_secret"
|
||||
|
||||
INPUT_FIELD_CODE = 'code'
|
||||
INPUT_FIELD_CODE = "code"
|
||||
|
||||
DUMMY_SECRET = 'FPPTH34D4E3MI2HG'
|
||||
DUMMY_SECRET = "FPPTH34D4E3MI2HG"
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -38,10 +41,15 @@ def _generate_qr_code(data: str) -> str:
|
||||
|
||||
with BytesIO() as buffer:
|
||||
qr_code.svg(file=buffer, scale=4)
|
||||
return '{}'.format(
|
||||
buffer.getvalue().decode("ascii").replace('\n', '')
|
||||
.replace('<?xml version="1.0" encoding="UTF-8"?>'
|
||||
'<svg xmlns="http://www.w3.org/2000/svg"', '<svg')
|
||||
return "{}".format(
|
||||
buffer.getvalue()
|
||||
.decode("ascii")
|
||||
.replace("\n", "")
|
||||
.replace(
|
||||
'<?xml version="1.0" encoding="UTF-8"?>'
|
||||
'<svg xmlns="http://www.w3.org/2000/svg"',
|
||||
"<svg",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@@ -51,16 +59,17 @@ def _generate_secret_and_qr_code(username: str) -> Tuple[str, str, str]:
|
||||
|
||||
ota_secret = pyotp.random_base32()
|
||||
url = pyotp.totp.TOTP(ota_secret).provisioning_uri(
|
||||
username, issuer_name="Home Assistant")
|
||||
username, issuer_name="Home Assistant"
|
||||
)
|
||||
image = _generate_qr_code(url)
|
||||
return ota_secret, url, image
|
||||
|
||||
|
||||
@MULTI_FACTOR_AUTH_MODULES.register('totp')
|
||||
@MULTI_FACTOR_AUTH_MODULES.register("totp")
|
||||
class TotpAuthModule(MultiFactorAuthModule):
|
||||
"""Auth module validate time-based one time password."""
|
||||
|
||||
DEFAULT_TITLE = 'Time-based One Time Password'
|
||||
DEFAULT_TITLE = "Time-based One Time Password"
|
||||
MAX_RETRY_TIME = 5
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: Dict[str, Any]) -> None:
|
||||
@@ -68,7 +77,8 @@ class TotpAuthModule(MultiFactorAuthModule):
|
||||
super().__init__(hass, config)
|
||||
self._users = None # type: Optional[Dict[str, str]]
|
||||
self._user_store = hass.helpers.storage.Store(
|
||||
STORAGE_VERSION, STORAGE_KEY, private=True)
|
||||
STORAGE_VERSION, STORAGE_KEY, private=True
|
||||
)
|
||||
self._init_lock = asyncio.Lock()
|
||||
|
||||
@property
|
||||
@@ -93,14 +103,13 @@ class TotpAuthModule(MultiFactorAuthModule):
|
||||
"""Save data."""
|
||||
await self._user_store.async_save({STORAGE_USERS: self._users})
|
||||
|
||||
def _add_ota_secret(self, user_id: str,
|
||||
secret: Optional[str] = None) -> str:
|
||||
def _add_ota_secret(self, user_id: str, secret: Optional[str] = None) -> str:
|
||||
"""Create a ota_secret for user."""
|
||||
import pyotp
|
||||
|
||||
ota_secret = secret or pyotp.random_base32() # type: str
|
||||
|
||||
self._users[user_id] = ota_secret # type: ignore
|
||||
self._users[user_id] = ota_secret # type: ignore
|
||||
return ota_secret
|
||||
|
||||
async def async_setup_flow(self, user_id: str) -> SetupFlow:
|
||||
@@ -108,7 +117,7 @@ class TotpAuthModule(MultiFactorAuthModule):
|
||||
|
||||
Mfa module should extend SetupFlow
|
||||
"""
|
||||
user = await self.hass.auth.async_get_user(user_id) # type: ignore
|
||||
user = await self.hass.auth.async_get_user(user_id) # type: ignore
|
||||
return TotpSetupFlow(self, self.input_schema, user)
|
||||
|
||||
async def async_setup_user(self, user_id: str, setup_data: Any) -> str:
|
||||
@@ -117,7 +126,8 @@ class TotpAuthModule(MultiFactorAuthModule):
|
||||
await self._async_load()
|
||||
|
||||
result = await self.hass.async_add_executor_job(
|
||||
self._add_ota_secret, user_id, setup_data.get('secret'))
|
||||
self._add_ota_secret, user_id, setup_data.get("secret")
|
||||
)
|
||||
|
||||
await self._async_save()
|
||||
return result
|
||||
@@ -127,7 +137,7 @@ class TotpAuthModule(MultiFactorAuthModule):
|
||||
if self._users is None:
|
||||
await self._async_load()
|
||||
|
||||
if self._users.pop(user_id, None): # type: ignore
|
||||
if self._users.pop(user_id, None): # type: ignore
|
||||
await self._async_save()
|
||||
|
||||
async def async_is_user_setup(self, user_id: str) -> bool:
|
||||
@@ -135,10 +145,9 @@ class TotpAuthModule(MultiFactorAuthModule):
|
||||
if self._users is None:
|
||||
await self._async_load()
|
||||
|
||||
return user_id in self._users # type: ignore
|
||||
return user_id in self._users # type: ignore
|
||||
|
||||
async def async_validate(
|
||||
self, user_id: str, user_input: Dict[str, Any]) -> bool:
|
||||
async def async_validate(self, user_id: str, user_input: Dict[str, Any]) -> bool:
|
||||
"""Return True if validation passed."""
|
||||
if self._users is None:
|
||||
await self._async_load()
|
||||
@@ -146,7 +155,8 @@ class TotpAuthModule(MultiFactorAuthModule):
|
||||
# user_input has been validate in caller
|
||||
# set INPUT_FIELD_CODE as vol.Required is not user friendly
|
||||
return await self.hass.async_add_executor_job(
|
||||
self._validate_2fa, user_id, user_input.get(INPUT_FIELD_CODE, ''))
|
||||
self._validate_2fa, user_id, user_input.get(INPUT_FIELD_CODE, "")
|
||||
)
|
||||
|
||||
def _validate_2fa(self, user_id: str, code: str) -> bool:
|
||||
"""Validate two factor authentication code."""
|
||||
@@ -165,9 +175,9 @@ class TotpAuthModule(MultiFactorAuthModule):
|
||||
class TotpSetupFlow(SetupFlow):
|
||||
"""Handler for the setup flow."""
|
||||
|
||||
def __init__(self, auth_module: TotpAuthModule,
|
||||
setup_schema: vol.Schema,
|
||||
user: User) -> None:
|
||||
def __init__(
|
||||
self, auth_module: TotpAuthModule, setup_schema: vol.Schema, user: User
|
||||
) -> None:
|
||||
"""Initialize the setup flow."""
|
||||
super().__init__(auth_module, setup_schema, user.id)
|
||||
# to fix typing complaint
|
||||
@@ -178,8 +188,8 @@ class TotpSetupFlow(SetupFlow):
|
||||
self._image = None # type Optional[str]
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: Optional[Dict[str, str]] = None) \
|
||||
-> Dict[str, Any]:
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle the first step of setup flow.
|
||||
|
||||
Return self.async_show_form(step_id='init') if user_input is None.
|
||||
@@ -191,30 +201,31 @@ class TotpSetupFlow(SetupFlow):
|
||||
|
||||
if user_input:
|
||||
verified = await self.hass.async_add_executor_job( # type: ignore
|
||||
pyotp.TOTP(self._ota_secret).verify, user_input['code'])
|
||||
pyotp.TOTP(self._ota_secret).verify, user_input["code"]
|
||||
)
|
||||
if verified:
|
||||
result = await self._auth_module.async_setup_user(
|
||||
self._user_id, {'secret': self._ota_secret})
|
||||
self._user_id, {"secret": self._ota_secret}
|
||||
)
|
||||
return self.async_create_entry(
|
||||
title=self._auth_module.name,
|
||||
data={'result': result}
|
||||
title=self._auth_module.name, data={"result": result}
|
||||
)
|
||||
|
||||
errors['base'] = 'invalid_code'
|
||||
errors["base"] = "invalid_code"
|
||||
|
||||
else:
|
||||
hass = self._auth_module.hass
|
||||
self._ota_secret, self._url, self._image = \
|
||||
await hass.async_add_executor_job( # type: ignore
|
||||
_generate_secret_and_qr_code, str(self._user.name))
|
||||
self._ota_secret, self._url, self._image = await hass.async_add_executor_job( # type: ignore
|
||||
_generate_secret_and_qr_code, str(self._user.name)
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id='init',
|
||||
step_id="init",
|
||||
data_schema=self._setup_schema,
|
||||
description_placeholders={
|
||||
'code': self._ota_secret,
|
||||
'url': self._url,
|
||||
'qr_code': self._image
|
||||
"code": self._ota_secret,
|
||||
"url": self._url,
|
||||
"qr_code": self._image,
|
||||
},
|
||||
errors=errors
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@@ -11,9 +11,9 @@ from . import permissions as perm_mdl
|
||||
from .const import GROUP_ID_ADMIN
|
||||
from .util import generate_secret
|
||||
|
||||
TOKEN_TYPE_NORMAL = 'normal'
|
||||
TOKEN_TYPE_SYSTEM = 'system'
|
||||
TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = 'long_lived_access_token'
|
||||
TOKEN_TYPE_NORMAL = "normal"
|
||||
TOKEN_TYPE_SYSTEM = "system"
|
||||
TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN = "long_lived_access_token"
|
||||
|
||||
|
||||
@attr.s(slots=True)
|
||||
@@ -32,7 +32,7 @@ class User:
|
||||
|
||||
name = attr.ib(type=str) # type: Optional[str]
|
||||
perm_lookup = attr.ib(
|
||||
type=perm_mdl.PermissionLookup, cmp=False,
|
||||
type=perm_mdl.PermissionLookup, cmp=False
|
||||
) # type: perm_mdl.PermissionLookup
|
||||
id = attr.ib(type=str, factory=lambda: uuid.uuid4().hex)
|
||||
is_owner = attr.ib(type=bool, default=False)
|
||||
@@ -42,9 +42,7 @@ class User:
|
||||
groups = attr.ib(type=List, factory=list, cmp=False) # type: List[Group]
|
||||
|
||||
# List of credentials of a user.
|
||||
credentials = attr.ib(
|
||||
type=list, factory=list, cmp=False
|
||||
) # type: List[Credentials]
|
||||
credentials = attr.ib(type=list, factory=list, cmp=False) # type: List[Credentials]
|
||||
|
||||
# Tokens associated with a user.
|
||||
refresh_tokens = attr.ib(
|
||||
@@ -52,10 +50,7 @@ class User:
|
||||
) # type: Dict[str, RefreshToken]
|
||||
|
||||
_permissions = attr.ib(
|
||||
type=Optional[perm_mdl.PolicyPermissions],
|
||||
init=False,
|
||||
cmp=False,
|
||||
default=None,
|
||||
type=Optional[perm_mdl.PolicyPermissions], init=False, cmp=False, default=None
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -68,9 +63,9 @@ class User:
|
||||
return self._permissions
|
||||
|
||||
self._permissions = perm_mdl.PolicyPermissions(
|
||||
perm_mdl.merge_policies([
|
||||
group.policy for group in self.groups]),
|
||||
self.perm_lookup)
|
||||
perm_mdl.merge_policies([group.policy for group in self.groups]),
|
||||
self.perm_lookup,
|
||||
)
|
||||
|
||||
return self._permissions
|
||||
|
||||
@@ -80,8 +75,7 @@ class User:
|
||||
if self.is_owner:
|
||||
return True
|
||||
|
||||
return self.is_active and any(
|
||||
gr.id == GROUP_ID_ADMIN for gr in self.groups)
|
||||
return self.is_active and any(gr.id == GROUP_ID_ADMIN for gr in self.groups)
|
||||
|
||||
def invalidate_permission_cache(self) -> None:
|
||||
"""Invalidate permission cache."""
|
||||
@@ -97,10 +91,13 @@ class RefreshToken:
|
||||
access_token_expiration = attr.ib(type=timedelta)
|
||||
client_name = attr.ib(type=Optional[str], default=None)
|
||||
client_icon = attr.ib(type=Optional[str], default=None)
|
||||
token_type = attr.ib(type=str, default=TOKEN_TYPE_NORMAL,
|
||||
validator=attr.validators.in_((
|
||||
TOKEN_TYPE_NORMAL, TOKEN_TYPE_SYSTEM,
|
||||
TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN)))
|
||||
token_type = attr.ib(
|
||||
type=str,
|
||||
default=TOKEN_TYPE_NORMAL,
|
||||
validator=attr.validators.in_(
|
||||
(TOKEN_TYPE_NORMAL, TOKEN_TYPE_SYSTEM, TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN)
|
||||
),
|
||||
)
|
||||
id = attr.ib(type=str, factory=lambda: uuid.uuid4().hex)
|
||||
created_at = attr.ib(type=datetime, factory=dt_util.utcnow)
|
||||
token = attr.ib(type=str, factory=lambda: generate_secret(64))
|
||||
@@ -124,5 +121,4 @@ class Credentials:
|
||||
is_new = attr.ib(type=bool, default=True)
|
||||
|
||||
|
||||
UserMeta = NamedTuple("UserMeta",
|
||||
[('name', Optional[str]), ('is_active', bool)])
|
||||
UserMeta = NamedTuple("UserMeta", [("name", Optional[str]), ("is_active", bool)])
|
||||
|
||||
@@ -1,8 +1,17 @@
|
||||
"""Permissions for Home Assistant."""
|
||||
import logging
|
||||
from typing import ( # noqa: F401
|
||||
cast, Any, Callable, Dict, List, Mapping, Set, Tuple, Union,
|
||||
TYPE_CHECKING)
|
||||
cast,
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
List,
|
||||
Mapping,
|
||||
Set,
|
||||
Tuple,
|
||||
Union,
|
||||
TYPE_CHECKING,
|
||||
)
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -14,9 +23,7 @@ from .merge import merge_policies # noqa
|
||||
from .util import test_all
|
||||
|
||||
|
||||
POLICY_SCHEMA = vol.Schema({
|
||||
vol.Optional(CAT_ENTITIES): ENTITY_POLICY_SCHEMA
|
||||
})
|
||||
POLICY_SCHEMA = vol.Schema({vol.Optional(CAT_ENTITIES): ENTITY_POLICY_SCHEMA})
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -47,8 +54,7 @@ class AbstractPermissions:
|
||||
class PolicyPermissions(AbstractPermissions):
|
||||
"""Handle permissions."""
|
||||
|
||||
def __init__(self, policy: PolicyType,
|
||||
perm_lookup: PermissionLookup) -> None:
|
||||
def __init__(self, policy: PolicyType, perm_lookup: PermissionLookup) -> None:
|
||||
"""Initialize the permission class."""
|
||||
self._policy = policy
|
||||
self._perm_lookup = perm_lookup
|
||||
@@ -59,14 +65,12 @@ class PolicyPermissions(AbstractPermissions):
|
||||
|
||||
def _entity_func(self) -> Callable[[str, str], bool]:
|
||||
"""Return a function that can test entity access."""
|
||||
return compile_entities(self._policy.get(CAT_ENTITIES),
|
||||
self._perm_lookup)
|
||||
return compile_entities(self._policy.get(CAT_ENTITIES), self._perm_lookup)
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
"""Equals check."""
|
||||
# pylint: disable=protected-access
|
||||
return (isinstance(other, PolicyPermissions) and
|
||||
other._policy == self._policy)
|
||||
return isinstance(other, PolicyPermissions) and other._policy == self._policy
|
||||
|
||||
|
||||
class _OwnerPermissions(AbstractPermissions):
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
"""Permission constants."""
|
||||
CAT_ENTITIES = 'entities'
|
||||
CAT_CONFIG_ENTRIES = 'config_entries'
|
||||
SUBCAT_ALL = 'all'
|
||||
CAT_ENTITIES = "entities"
|
||||
CAT_CONFIG_ENTRIES = "config_entries"
|
||||
SUBCAT_ALL = "all"
|
||||
|
||||
POLICY_READ = 'read'
|
||||
POLICY_CONTROL = 'control'
|
||||
POLICY_EDIT = 'edit'
|
||||
POLICY_READ = "read"
|
||||
POLICY_CONTROL = "control"
|
||||
POLICY_EDIT = "edit"
|
||||
|
||||
@@ -7,51 +7,59 @@ import voluptuous as vol
|
||||
from .const import SUBCAT_ALL, POLICY_READ, POLICY_CONTROL, POLICY_EDIT
|
||||
from .models import PermissionLookup
|
||||
from .types import CategoryType, SubCategoryDict, ValueType
|
||||
|
||||
# pylint: disable=unused-import
|
||||
from .util import SubCatLookupType, lookup_all, compile_policy # noqa
|
||||
|
||||
SINGLE_ENTITY_SCHEMA = vol.Any(True, vol.Schema({
|
||||
vol.Optional(POLICY_READ): True,
|
||||
vol.Optional(POLICY_CONTROL): True,
|
||||
vol.Optional(POLICY_EDIT): True,
|
||||
}))
|
||||
SINGLE_ENTITY_SCHEMA = vol.Any(
|
||||
True,
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional(POLICY_READ): True,
|
||||
vol.Optional(POLICY_CONTROL): True,
|
||||
vol.Optional(POLICY_EDIT): True,
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
ENTITY_DOMAINS = 'domains'
|
||||
ENTITY_AREAS = 'area_ids'
|
||||
ENTITY_DEVICE_IDS = 'device_ids'
|
||||
ENTITY_ENTITY_IDS = 'entity_ids'
|
||||
ENTITY_DOMAINS = "domains"
|
||||
ENTITY_AREAS = "area_ids"
|
||||
ENTITY_DEVICE_IDS = "device_ids"
|
||||
ENTITY_ENTITY_IDS = "entity_ids"
|
||||
|
||||
ENTITY_VALUES_SCHEMA = vol.Any(True, vol.Schema({
|
||||
str: SINGLE_ENTITY_SCHEMA
|
||||
}))
|
||||
ENTITY_VALUES_SCHEMA = vol.Any(True, vol.Schema({str: SINGLE_ENTITY_SCHEMA}))
|
||||
|
||||
ENTITY_POLICY_SCHEMA = vol.Any(True, vol.Schema({
|
||||
vol.Optional(SUBCAT_ALL): SINGLE_ENTITY_SCHEMA,
|
||||
vol.Optional(ENTITY_AREAS): ENTITY_VALUES_SCHEMA,
|
||||
vol.Optional(ENTITY_DEVICE_IDS): ENTITY_VALUES_SCHEMA,
|
||||
vol.Optional(ENTITY_DOMAINS): ENTITY_VALUES_SCHEMA,
|
||||
vol.Optional(ENTITY_ENTITY_IDS): ENTITY_VALUES_SCHEMA,
|
||||
}))
|
||||
ENTITY_POLICY_SCHEMA = vol.Any(
|
||||
True,
|
||||
vol.Schema(
|
||||
{
|
||||
vol.Optional(SUBCAT_ALL): SINGLE_ENTITY_SCHEMA,
|
||||
vol.Optional(ENTITY_AREAS): ENTITY_VALUES_SCHEMA,
|
||||
vol.Optional(ENTITY_DEVICE_IDS): ENTITY_VALUES_SCHEMA,
|
||||
vol.Optional(ENTITY_DOMAINS): ENTITY_VALUES_SCHEMA,
|
||||
vol.Optional(ENTITY_ENTITY_IDS): ENTITY_VALUES_SCHEMA,
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _lookup_domain(perm_lookup: PermissionLookup,
|
||||
domains_dict: SubCategoryDict,
|
||||
entity_id: str) -> Optional[ValueType]:
|
||||
def _lookup_domain(
|
||||
perm_lookup: PermissionLookup, domains_dict: SubCategoryDict, entity_id: str
|
||||
) -> Optional[ValueType]:
|
||||
"""Look up entity permissions by domain."""
|
||||
return domains_dict.get(entity_id.split(".", 1)[0])
|
||||
|
||||
|
||||
def _lookup_area(perm_lookup: PermissionLookup, area_dict: SubCategoryDict,
|
||||
entity_id: str) -> Optional[ValueType]:
|
||||
def _lookup_area(
|
||||
perm_lookup: PermissionLookup, area_dict: SubCategoryDict, entity_id: str
|
||||
) -> Optional[ValueType]:
|
||||
"""Look up entity permissions by area."""
|
||||
entity_entry = perm_lookup.entity_registry.async_get(entity_id)
|
||||
|
||||
if entity_entry is None or entity_entry.device_id is None:
|
||||
return None
|
||||
|
||||
device_entry = perm_lookup.device_registry.async_get(
|
||||
entity_entry.device_id
|
||||
)
|
||||
device_entry = perm_lookup.device_registry.async_get(entity_entry.device_id)
|
||||
|
||||
if device_entry is None or device_entry.area_id is None:
|
||||
return None
|
||||
@@ -59,9 +67,9 @@ def _lookup_area(perm_lookup: PermissionLookup, area_dict: SubCategoryDict,
|
||||
return area_dict.get(device_entry.area_id)
|
||||
|
||||
|
||||
def _lookup_device(perm_lookup: PermissionLookup,
|
||||
devices_dict: SubCategoryDict,
|
||||
entity_id: str) -> Optional[ValueType]:
|
||||
def _lookup_device(
|
||||
perm_lookup: PermissionLookup, devices_dict: SubCategoryDict, entity_id: str
|
||||
) -> Optional[ValueType]:
|
||||
"""Look up entity permissions by device."""
|
||||
entity_entry = perm_lookup.entity_registry.async_get(entity_id)
|
||||
|
||||
@@ -71,15 +79,16 @@ def _lookup_device(perm_lookup: PermissionLookup,
|
||||
return devices_dict.get(entity_entry.device_id)
|
||||
|
||||
|
||||
def _lookup_entity_id(perm_lookup: PermissionLookup,
|
||||
entities_dict: SubCategoryDict,
|
||||
entity_id: str) -> Optional[ValueType]:
|
||||
def _lookup_entity_id(
|
||||
perm_lookup: PermissionLookup, entities_dict: SubCategoryDict, entity_id: str
|
||||
) -> Optional[ValueType]:
|
||||
"""Look up entity permission by entity id."""
|
||||
return entities_dict.get(entity_id)
|
||||
|
||||
|
||||
def compile_entities(policy: CategoryType, perm_lookup: PermissionLookup) \
|
||||
-> Callable[[str, str], bool]:
|
||||
def compile_entities(
|
||||
policy: CategoryType, perm_lookup: PermissionLookup
|
||||
) -> Callable[[str, str], bool]:
|
||||
"""Compile policy into a function that tests policy."""
|
||||
subcategories = OrderedDict() # type: SubCatLookupType
|
||||
subcategories[ENTITY_ENTITY_IDS] = _lookup_entity_id
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
"""Merging of policies."""
|
||||
from typing import ( # noqa: F401
|
||||
cast, Dict, List, Set)
|
||||
from typing import cast, Dict, List, Set # noqa: F401
|
||||
|
||||
from .types import PolicyType, CategoryType
|
||||
|
||||
@@ -14,8 +13,9 @@ def merge_policies(policies: List[PolicyType]) -> PolicyType:
|
||||
if category in seen:
|
||||
continue
|
||||
seen.add(category)
|
||||
new_policy[category] = _merge_policies([
|
||||
policy.get(category) for policy in policies])
|
||||
new_policy[category] = _merge_policies(
|
||||
[policy.get(category) for policy in policies]
|
||||
)
|
||||
cast(PolicyType, new_policy)
|
||||
return new_policy
|
||||
|
||||
|
||||
@@ -5,17 +5,13 @@ import attr
|
||||
|
||||
if TYPE_CHECKING:
|
||||
# pylint: disable=unused-import
|
||||
from homeassistant.helpers import ( # noqa
|
||||
entity_registry as ent_reg,
|
||||
)
|
||||
from homeassistant.helpers import ( # noqa
|
||||
device_registry as dev_reg,
|
||||
)
|
||||
from homeassistant.helpers import entity_registry as ent_reg # noqa
|
||||
from homeassistant.helpers import device_registry as dev_reg # noqa
|
||||
|
||||
|
||||
@attr.s(slots=True)
|
||||
class PermissionLookup:
|
||||
"""Class to hold data for permission lookups."""
|
||||
|
||||
entity_registry = attr.ib(type='ent_reg.EntityRegistry')
|
||||
device_registry = attr.ib(type='dev_reg.DeviceRegistry')
|
||||
entity_registry = attr.ib(type="ent_reg.EntityRegistry")
|
||||
device_registry = attr.ib(type="dev_reg.DeviceRegistry")
|
||||
|
||||
@@ -1,18 +1,8 @@
|
||||
"""System policies."""
|
||||
from .const import CAT_ENTITIES, SUBCAT_ALL, POLICY_READ
|
||||
|
||||
ADMIN_POLICY = {
|
||||
CAT_ENTITIES: True,
|
||||
}
|
||||
ADMIN_POLICY = {CAT_ENTITIES: True}
|
||||
|
||||
USER_POLICY = {
|
||||
CAT_ENTITIES: True,
|
||||
}
|
||||
USER_POLICY = {CAT_ENTITIES: True}
|
||||
|
||||
READ_ONLY_POLICY = {
|
||||
CAT_ENTITIES: {
|
||||
SUBCAT_ALL: {
|
||||
POLICY_READ: True
|
||||
}
|
||||
}
|
||||
}
|
||||
READ_ONLY_POLICY = {CAT_ENTITIES: {SUBCAT_ALL: {POLICY_READ: True}}}
|
||||
|
||||
@@ -7,17 +7,13 @@ ValueType = Union[
|
||||
# Example: entities.all = { read: true, control: true }
|
||||
Mapping[str, bool],
|
||||
bool,
|
||||
None
|
||||
None,
|
||||
]
|
||||
|
||||
# Example: entities.domains = { light: … }
|
||||
SubCategoryDict = Mapping[str, ValueType]
|
||||
|
||||
SubCategoryType = Union[
|
||||
SubCategoryDict,
|
||||
bool,
|
||||
None
|
||||
]
|
||||
SubCategoryType = Union[SubCategoryDict, bool, None]
|
||||
|
||||
CategoryType = Union[
|
||||
# Example: entities.domains
|
||||
@@ -25,7 +21,7 @@ CategoryType = Union[
|
||||
# Example: entities.all
|
||||
Mapping[str, ValueType],
|
||||
bool,
|
||||
None
|
||||
None,
|
||||
]
|
||||
|
||||
# Example: { entities: … }
|
||||
|
||||
@@ -1,34 +1,34 @@
|
||||
"""Helpers to deal with permissions."""
|
||||
from functools import wraps
|
||||
|
||||
from typing import Callable, Dict, List, Optional, Union, cast # noqa: F401
|
||||
from typing import Callable, Dict, List, Optional, cast # noqa: F401
|
||||
|
||||
from .const import SUBCAT_ALL
|
||||
from .models import PermissionLookup
|
||||
from .types import CategoryType, SubCategoryDict, ValueType
|
||||
|
||||
LookupFunc = Callable[[PermissionLookup, SubCategoryDict, str],
|
||||
Optional[ValueType]]
|
||||
LookupFunc = Callable[[PermissionLookup, SubCategoryDict, str], Optional[ValueType]]
|
||||
SubCatLookupType = Dict[str, LookupFunc]
|
||||
|
||||
|
||||
def lookup_all(perm_lookup: PermissionLookup, lookup_dict: SubCategoryDict,
|
||||
object_id: str) -> ValueType:
|
||||
def lookup_all(
|
||||
perm_lookup: PermissionLookup, lookup_dict: SubCategoryDict, object_id: str
|
||||
) -> ValueType:
|
||||
"""Look up permission for all."""
|
||||
# In case of ALL category, lookup_dict IS the schema.
|
||||
return cast(ValueType, lookup_dict)
|
||||
|
||||
|
||||
def compile_policy(
|
||||
policy: CategoryType, subcategories: SubCatLookupType,
|
||||
perm_lookup: PermissionLookup
|
||||
) -> Callable[[str, str], bool]: # noqa
|
||||
policy: CategoryType, subcategories: SubCatLookupType, perm_lookup: PermissionLookup
|
||||
) -> Callable[[str, str], bool]: # noqa
|
||||
"""Compile policy into a function that tests policy.
|
||||
Subcategories are mapping key -> lookup function, ordered by highest
|
||||
priority first.
|
||||
"""
|
||||
# None, False, empty dict
|
||||
if not policy:
|
||||
|
||||
def apply_policy_deny_all(entity_id: str, key: str) -> bool:
|
||||
"""Decline all."""
|
||||
return False
|
||||
@@ -36,6 +36,7 @@ def compile_policy(
|
||||
return apply_policy_deny_all
|
||||
|
||||
if policy is True:
|
||||
|
||||
def apply_policy_allow_all(entity_id: str, key: str) -> bool:
|
||||
"""Approve all."""
|
||||
return True
|
||||
@@ -44,7 +45,7 @@ def compile_policy(
|
||||
|
||||
assert isinstance(policy, dict)
|
||||
|
||||
funcs = [] # type: List[Callable[[str, str], Union[None, bool]]]
|
||||
funcs = [] # type: List[Callable[[str, str], Optional[bool]]]
|
||||
|
||||
for key, lookup_func in subcategories.items():
|
||||
lookup_value = policy.get(key)
|
||||
@@ -54,8 +55,7 @@ def compile_policy(
|
||||
return lambda object_id, key: True
|
||||
|
||||
if lookup_value is not None:
|
||||
funcs.append(_gen_dict_test_func(
|
||||
perm_lookup, lookup_func, lookup_value))
|
||||
funcs.append(_gen_dict_test_func(perm_lookup, lookup_func, lookup_value))
|
||||
|
||||
if len(funcs) == 1:
|
||||
func = funcs[0]
|
||||
@@ -79,15 +79,13 @@ def compile_policy(
|
||||
|
||||
|
||||
def _gen_dict_test_func(
|
||||
perm_lookup: PermissionLookup,
|
||||
lookup_func: LookupFunc,
|
||||
lookup_dict: SubCategoryDict
|
||||
) -> Callable[[str, str], Optional[bool]]: # noqa
|
||||
perm_lookup: PermissionLookup, lookup_func: LookupFunc, lookup_dict: SubCategoryDict
|
||||
) -> Callable[[str, str], Optional[bool]]: # noqa
|
||||
"""Generate a lookup function."""
|
||||
|
||||
def test_value(object_id: str, key: str) -> Optional[bool]:
|
||||
"""Test if permission is allowed based on the keys."""
|
||||
schema = lookup_func(
|
||||
perm_lookup, lookup_dict, object_id) # type: ValueType
|
||||
schema = lookup_func(perm_lookup, lookup_dict, object_id) # type: ValueType
|
||||
|
||||
if schema is None or isinstance(schema, bool):
|
||||
return schema
|
||||
|
||||
@@ -19,25 +19,29 @@ from ..const import MFA_SESSION_EXPIRATION
|
||||
from ..models import Credentials, User, UserMeta # noqa: F401
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
DATA_REQS = 'auth_prov_reqs_processed'
|
||||
DATA_REQS = "auth_prov_reqs_processed"
|
||||
|
||||
AUTH_PROVIDERS = Registry()
|
||||
|
||||
AUTH_PROVIDER_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_TYPE): str,
|
||||
vol.Optional(CONF_NAME): str,
|
||||
# Specify ID if you have two auth providers for same type.
|
||||
vol.Optional(CONF_ID): str,
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
AUTH_PROVIDER_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_TYPE): str,
|
||||
vol.Optional(CONF_NAME): str,
|
||||
# Specify ID if you have two auth providers for same type.
|
||||
vol.Optional(CONF_ID): str,
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
class AuthProvider:
|
||||
"""Provider of user authentication."""
|
||||
|
||||
DEFAULT_TITLE = 'Unnamed auth provider'
|
||||
DEFAULT_TITLE = "Unnamed auth provider"
|
||||
|
||||
def __init__(self, hass: HomeAssistant, store: AuthStore,
|
||||
config: Dict[str, Any]) -> None:
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, store: AuthStore, config: Dict[str, Any]
|
||||
) -> None:
|
||||
"""Initialize an auth provider."""
|
||||
self.hass = hass
|
||||
self.store = store
|
||||
@@ -73,22 +77,22 @@ class AuthProvider:
|
||||
credentials
|
||||
for user in users
|
||||
for credentials in user.credentials
|
||||
if (credentials.auth_provider_type == self.type and
|
||||
credentials.auth_provider_id == self.id)
|
||||
if (
|
||||
credentials.auth_provider_type == self.type
|
||||
and credentials.auth_provider_id == self.id
|
||||
)
|
||||
]
|
||||
|
||||
@callback
|
||||
def async_create_credentials(self, data: Dict[str, str]) -> Credentials:
|
||||
"""Create credentials."""
|
||||
return Credentials(
|
||||
auth_provider_type=self.type,
|
||||
auth_provider_id=self.id,
|
||||
data=data,
|
||||
auth_provider_type=self.type, auth_provider_id=self.id, data=data
|
||||
)
|
||||
|
||||
# Implement by extending class
|
||||
|
||||
async def async_login_flow(self, context: Optional[Dict]) -> 'LoginFlow':
|
||||
async def async_login_flow(self, context: Optional[Dict]) -> "LoginFlow":
|
||||
"""Return the data flow for logging in with auth provider.
|
||||
|
||||
Auth provider should extend LoginFlow and return an instance.
|
||||
@@ -96,22 +100,28 @@ class AuthProvider:
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_get_or_create_credentials(
|
||||
self, flow_result: Dict[str, str]) -> Credentials:
|
||||
self, flow_result: Dict[str, str]
|
||||
) -> Credentials:
|
||||
"""Get credentials based on the flow result."""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_user_meta_for_credentials(
|
||||
self, credentials: Credentials) -> UserMeta:
|
||||
self, credentials: Credentials
|
||||
) -> UserMeta:
|
||||
"""Return extra user metadata for credentials.
|
||||
|
||||
Will be used to populate info when creating a new user.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_initialize(self) -> None:
|
||||
"""Initialize the auth provider."""
|
||||
pass
|
||||
|
||||
|
||||
async def auth_provider_from_config(
|
||||
hass: HomeAssistant, store: AuthStore,
|
||||
config: Dict[str, Any]) -> AuthProvider:
|
||||
hass: HomeAssistant, store: AuthStore, config: Dict[str, Any]
|
||||
) -> AuthProvider:
|
||||
"""Initialize an auth provider from a config."""
|
||||
provider_name = config[CONF_TYPE]
|
||||
module = await load_auth_provider_module(hass, provider_name)
|
||||
@@ -119,25 +129,31 @@ async def auth_provider_from_config(
|
||||
try:
|
||||
config = module.CONFIG_SCHEMA(config) # type: ignore
|
||||
except vol.Invalid as err:
|
||||
_LOGGER.error('Invalid configuration for auth provider %s: %s',
|
||||
provider_name, humanize_error(config, err))
|
||||
_LOGGER.error(
|
||||
"Invalid configuration for auth provider %s: %s",
|
||||
provider_name,
|
||||
humanize_error(config, err),
|
||||
)
|
||||
raise
|
||||
|
||||
return AUTH_PROVIDERS[provider_name](hass, store, config) # type: ignore
|
||||
|
||||
|
||||
async def load_auth_provider_module(
|
||||
hass: HomeAssistant, provider: str) -> types.ModuleType:
|
||||
hass: HomeAssistant, provider: str
|
||||
) -> types.ModuleType:
|
||||
"""Load an auth provider."""
|
||||
try:
|
||||
module = importlib.import_module(
|
||||
'homeassistant.auth.providers.{}'.format(provider))
|
||||
"homeassistant.auth.providers.{}".format(provider)
|
||||
)
|
||||
except ImportError as err:
|
||||
_LOGGER.error('Unable to load auth provider %s: %s', provider, err)
|
||||
raise HomeAssistantError('Unable to load auth provider {}: {}'.format(
|
||||
provider, err))
|
||||
_LOGGER.error("Unable to load auth provider %s: %s", provider, err)
|
||||
raise HomeAssistantError(
|
||||
"Unable to load auth provider {}: {}".format(provider, err)
|
||||
)
|
||||
|
||||
if hass.config.skip_pip or not hasattr(module, 'REQUIREMENTS'):
|
||||
if hass.config.skip_pip or not hasattr(module, "REQUIREMENTS"):
|
||||
return module
|
||||
|
||||
processed = hass.data.get(DATA_REQS)
|
||||
@@ -150,12 +166,13 @@ async def load_auth_provider_module(
|
||||
# https://github.com/python/mypy/issues/1424
|
||||
reqs = module.REQUIREMENTS # type: ignore
|
||||
req_success = await requirements.async_process_requirements(
|
||||
hass, 'auth provider {}'.format(provider), reqs)
|
||||
hass, "auth provider {}".format(provider), reqs
|
||||
)
|
||||
|
||||
if not req_success:
|
||||
raise HomeAssistantError(
|
||||
'Unable to process requirements of auth provider {}'.format(
|
||||
provider))
|
||||
"Unable to process requirements of auth provider {}".format(provider)
|
||||
)
|
||||
|
||||
processed.add(provider)
|
||||
return module
|
||||
@@ -175,8 +192,8 @@ class LoginFlow(data_entry_flow.FlowHandler):
|
||||
self.user = None # type: Optional[User]
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: Optional[Dict[str, str]] = None) \
|
||||
-> Dict[str, Any]:
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle the first step of login flow.
|
||||
|
||||
Return self.async_show_form(step_id='init') if user_input is None.
|
||||
@@ -185,80 +202,75 @@ class LoginFlow(data_entry_flow.FlowHandler):
|
||||
raise NotImplementedError
|
||||
|
||||
async def async_step_select_mfa_module(
|
||||
self, user_input: Optional[Dict[str, str]] = None) \
|
||||
-> Dict[str, Any]:
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle the step of select mfa module."""
|
||||
errors = {}
|
||||
|
||||
if user_input is not None:
|
||||
auth_module = user_input.get('multi_factor_auth_module')
|
||||
auth_module = user_input.get("multi_factor_auth_module")
|
||||
if auth_module in self.available_mfa_modules:
|
||||
self._auth_module_id = auth_module
|
||||
return await self.async_step_mfa()
|
||||
errors['base'] = 'invalid_auth_module'
|
||||
errors["base"] = "invalid_auth_module"
|
||||
|
||||
if len(self.available_mfa_modules) == 1:
|
||||
self._auth_module_id = list(self.available_mfa_modules.keys())[0]
|
||||
return await self.async_step_mfa()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id='select_mfa_module',
|
||||
data_schema=vol.Schema({
|
||||
'multi_factor_auth_module': vol.In(self.available_mfa_modules)
|
||||
}),
|
||||
step_id="select_mfa_module",
|
||||
data_schema=vol.Schema(
|
||||
{"multi_factor_auth_module": vol.In(self.available_mfa_modules)}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_mfa(
|
||||
self, user_input: Optional[Dict[str, str]] = None) \
|
||||
-> Dict[str, Any]:
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle the step of mfa validation."""
|
||||
assert self.user
|
||||
|
||||
errors = {}
|
||||
|
||||
auth_module = self._auth_manager.get_auth_mfa_module(
|
||||
self._auth_module_id)
|
||||
auth_module = self._auth_manager.get_auth_mfa_module(self._auth_module_id)
|
||||
if auth_module is None:
|
||||
# Given an invalid input to async_step_select_mfa_module
|
||||
# will show invalid_auth_module error
|
||||
return await self.async_step_select_mfa_module(user_input={})
|
||||
|
||||
if user_input is None and hasattr(auth_module,
|
||||
'async_initialize_login_mfa_step'):
|
||||
if user_input is None and hasattr(
|
||||
auth_module, "async_initialize_login_mfa_step"
|
||||
):
|
||||
try:
|
||||
await auth_module.async_initialize_login_mfa_step(self.user.id)
|
||||
except HomeAssistantError:
|
||||
_LOGGER.exception('Error initializing MFA step')
|
||||
return self.async_abort(reason='unknown_error')
|
||||
_LOGGER.exception("Error initializing MFA step")
|
||||
return self.async_abort(reason="unknown_error")
|
||||
|
||||
if user_input is not None:
|
||||
expires = self.created_at + MFA_SESSION_EXPIRATION
|
||||
if dt_util.utcnow() > expires:
|
||||
return self.async_abort(
|
||||
reason='login_expired'
|
||||
)
|
||||
return self.async_abort(reason="login_expired")
|
||||
|
||||
result = await auth_module.async_validate(
|
||||
self.user.id, user_input)
|
||||
result = await auth_module.async_validate(self.user.id, user_input)
|
||||
if not result:
|
||||
errors['base'] = 'invalid_code'
|
||||
errors["base"] = "invalid_code"
|
||||
self.invalid_mfa_times += 1
|
||||
if self.invalid_mfa_times >= auth_module.MAX_RETRY_TIME > 0:
|
||||
return self.async_abort(
|
||||
reason='too_many_retry'
|
||||
)
|
||||
return self.async_abort(reason="too_many_retry")
|
||||
|
||||
if not errors:
|
||||
return await self.async_finish(self.user)
|
||||
|
||||
description_placeholders = {
|
||||
'mfa_module_name': auth_module.name,
|
||||
'mfa_module_id': auth_module.id,
|
||||
"mfa_module_name": auth_module.name,
|
||||
"mfa_module_id": auth_module.id,
|
||||
} # type: Dict[str, Optional[str]]
|
||||
|
||||
return self.async_show_form(
|
||||
step_id='mfa',
|
||||
step_id="mfa",
|
||||
data_schema=auth_module.input_schema,
|
||||
description_placeholders=description_placeholders,
|
||||
errors=errors,
|
||||
@@ -266,7 +278,4 @@ class LoginFlow(data_entry_flow.FlowHandler):
|
||||
|
||||
async def async_finish(self, flow_result: Any) -> Dict:
|
||||
"""Handle the pass of login flow."""
|
||||
return self.async_create_entry(
|
||||
title=self._auth_provider.name,
|
||||
data=flow_result
|
||||
)
|
||||
return self.async_create_entry(title=self._auth_provider.name, data=flow_result)
|
||||
|
||||
@@ -19,15 +19,16 @@ CONF_COMMAND = "command"
|
||||
CONF_ARGS = "args"
|
||||
CONF_META = "meta"
|
||||
|
||||
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend({
|
||||
vol.Required(CONF_COMMAND): vol.All(
|
||||
str,
|
||||
os.path.normpath,
|
||||
msg="must be an absolute path"
|
||||
),
|
||||
vol.Optional(CONF_ARGS, default=None): vol.Any(vol.DefaultTo(list), [str]),
|
||||
vol.Optional(CONF_META, default=False): bool,
|
||||
}, extra=vol.PREVENT_EXTRA)
|
||||
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_COMMAND): vol.All(
|
||||
str, os.path.normpath, msg="must be an absolute path"
|
||||
),
|
||||
vol.Optional(CONF_ARGS, default=None): vol.Any(vol.DefaultTo(list), [str]),
|
||||
vol.Optional(CONF_META, default=False): bool,
|
||||
},
|
||||
extra=vol.PREVENT_EXTRA,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -60,29 +61,27 @@ class CommandLineAuthProvider(AuthProvider):
|
||||
|
||||
async def async_validate_login(self, username: str, password: str) -> None:
|
||||
"""Validate a username and password."""
|
||||
env = {
|
||||
"username": username,
|
||||
"password": password,
|
||||
}
|
||||
env = {"username": username, "password": password}
|
||||
try:
|
||||
# pylint: disable=no-member
|
||||
process = await asyncio.subprocess.create_subprocess_exec(
|
||||
self.config[CONF_COMMAND], *self.config[CONF_ARGS],
|
||||
self.config[CONF_COMMAND],
|
||||
*self.config[CONF_ARGS],
|
||||
env=env,
|
||||
stdout=asyncio.subprocess.PIPE
|
||||
if self.config[CONF_META] else None,
|
||||
stdout=asyncio.subprocess.PIPE if self.config[CONF_META] else None,
|
||||
)
|
||||
stdout, _ = (await process.communicate())
|
||||
stdout, _ = await process.communicate()
|
||||
except OSError as err:
|
||||
# happens when command doesn't exist or permission is denied
|
||||
_LOGGER.error("Error while authenticating %r: %s",
|
||||
username, err)
|
||||
_LOGGER.error("Error while authenticating %r: %s", username, err)
|
||||
raise InvalidAuthError
|
||||
|
||||
if process.returncode != 0:
|
||||
_LOGGER.error("User %r failed to authenticate, command exited "
|
||||
"with code %d.",
|
||||
username, process.returncode)
|
||||
_LOGGER.error(
|
||||
"User %r failed to authenticate, command exited " "with code %d.",
|
||||
username,
|
||||
process.returncode,
|
||||
)
|
||||
raise InvalidAuthError
|
||||
|
||||
if self.config[CONF_META]:
|
||||
@@ -103,7 +102,7 @@ class CommandLineAuthProvider(AuthProvider):
|
||||
self._user_meta[username] = meta
|
||||
|
||||
async def async_get_or_create_credentials(
|
||||
self, flow_result: Dict[str, str]
|
||||
self, flow_result: Dict[str, str]
|
||||
) -> Credentials:
|
||||
"""Get credentials based on the flow result."""
|
||||
username = flow_result["username"]
|
||||
@@ -112,29 +111,24 @@ class CommandLineAuthProvider(AuthProvider):
|
||||
return credential
|
||||
|
||||
# Create new credentials.
|
||||
return self.async_create_credentials({
|
||||
"username": username,
|
||||
})
|
||||
return self.async_create_credentials({"username": username})
|
||||
|
||||
async def async_user_meta_for_credentials(
|
||||
self, credentials: Credentials
|
||||
self, credentials: Credentials
|
||||
) -> UserMeta:
|
||||
"""Return extra user metadata for credentials.
|
||||
|
||||
Currently, only name is supported.
|
||||
"""
|
||||
meta = self._user_meta.get(credentials.data["username"], {})
|
||||
return UserMeta(
|
||||
name=meta.get("name"),
|
||||
is_active=True,
|
||||
)
|
||||
return UserMeta(name=meta.get("name"), is_active=True)
|
||||
|
||||
|
||||
class CommandLineLoginFlow(LoginFlow):
|
||||
"""Handler for the login flow."""
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle the step of the form."""
|
||||
errors = {}
|
||||
@@ -142,10 +136,9 @@ class CommandLineLoginFlow(LoginFlow):
|
||||
if user_input is not None:
|
||||
user_input["username"] = user_input["username"].strip()
|
||||
try:
|
||||
await cast(CommandLineAuthProvider, self._auth_provider) \
|
||||
.async_validate_login(
|
||||
user_input["username"], user_input["password"]
|
||||
)
|
||||
await cast(
|
||||
CommandLineAuthProvider, self._auth_provider
|
||||
).async_validate_login(user_input["username"], user_input["password"])
|
||||
except InvalidAuthError:
|
||||
errors["base"] = "invalid_auth"
|
||||
|
||||
@@ -158,7 +151,5 @@ class CommandLineLoginFlow(LoginFlow):
|
||||
schema["password"] = str
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="init",
|
||||
data_schema=vol.Schema(schema),
|
||||
errors=errors,
|
||||
step_id="init", data_schema=vol.Schema(schema), errors=errors
|
||||
)
|
||||
|
||||
@@ -19,14 +19,13 @@ from ..models import Credentials, UserMeta
|
||||
|
||||
|
||||
STORAGE_VERSION = 1
|
||||
STORAGE_KEY = 'auth_provider.homeassistant'
|
||||
STORAGE_KEY = "auth_provider.homeassistant"
|
||||
|
||||
|
||||
def _disallow_id(conf: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Disallow ID in config."""
|
||||
if CONF_ID in conf:
|
||||
raise vol.Invalid(
|
||||
'ID is not allowed for the homeassistant auth provider.')
|
||||
raise vol.Invalid("ID is not allowed for the homeassistant auth provider.")
|
||||
|
||||
return conf
|
||||
|
||||
@@ -51,8 +50,9 @@ class Data:
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize the user data store."""
|
||||
self.hass = hass
|
||||
self._store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY,
|
||||
private=True)
|
||||
self._store = hass.helpers.storage.Store(
|
||||
STORAGE_VERSION, STORAGE_KEY, private=True
|
||||
)
|
||||
self._data = None # type: Optional[Dict[str, Any]]
|
||||
# Legacy mode will allow usernames to start/end with whitespace
|
||||
# and will compare usernames case-insensitive.
|
||||
@@ -72,14 +72,12 @@ class Data:
|
||||
data = await self._store.async_load()
|
||||
|
||||
if data is None:
|
||||
data = {
|
||||
'users': []
|
||||
}
|
||||
data = {"users": []}
|
||||
|
||||
seen = set() # type: Set[str]
|
||||
|
||||
for user in data['users']:
|
||||
username = user['username']
|
||||
for user in data["users"]:
|
||||
username = user["username"]
|
||||
|
||||
# check if we have duplicates
|
||||
folded = username.casefold()
|
||||
@@ -90,7 +88,9 @@ class Data:
|
||||
logging.getLogger(__name__).warning(
|
||||
"Home Assistant auth provider is running in legacy mode "
|
||||
"because we detected usernames that are case-insensitive"
|
||||
"equivalent. Please change the username: '%s'.", username)
|
||||
"equivalent. Please change the username: '%s'.",
|
||||
username,
|
||||
)
|
||||
|
||||
break
|
||||
|
||||
@@ -103,7 +103,9 @@ class Data:
|
||||
logging.getLogger(__name__).warning(
|
||||
"Home Assistant auth provider is running in legacy mode "
|
||||
"because we detected usernames that start or end in a "
|
||||
"space. Please change the username: '%s'.", username)
|
||||
"space. Please change the username: '%s'.",
|
||||
username,
|
||||
)
|
||||
|
||||
break
|
||||
|
||||
@@ -112,7 +114,7 @@ class Data:
|
||||
@property
|
||||
def users(self) -> List[Dict[str, str]]:
|
||||
"""Return users."""
|
||||
return self._data['users'] # type: ignore
|
||||
return self._data["users"] # type: ignore
|
||||
|
||||
def validate_login(self, username: str, password: str) -> None:
|
||||
"""Validate a username and password.
|
||||
@@ -120,32 +122,30 @@ class Data:
|
||||
Raises InvalidAuth if auth invalid.
|
||||
"""
|
||||
username = self.normalize_username(username)
|
||||
dummy = b'$2b$12$CiuFGszHx9eNHxPuQcwBWez4CwDTOcLTX5CbOpV6gef2nYuXkY7BO'
|
||||
dummy = b"$2b$12$CiuFGszHx9eNHxPuQcwBWez4CwDTOcLTX5CbOpV6gef2nYuXkY7BO"
|
||||
found = None
|
||||
|
||||
# Compare all users to avoid timing attacks.
|
||||
for user in self.users:
|
||||
if self.normalize_username(user['username']) == username:
|
||||
if self.normalize_username(user["username"]) == username:
|
||||
found = user
|
||||
|
||||
if found is None:
|
||||
# check a hash to make timing the same as if user was found
|
||||
bcrypt.checkpw(b'foo',
|
||||
dummy)
|
||||
bcrypt.checkpw(b"foo", dummy)
|
||||
raise InvalidAuth
|
||||
|
||||
user_hash = base64.b64decode(found['password'])
|
||||
user_hash = base64.b64decode(found["password"])
|
||||
|
||||
# bcrypt.checkpw is timing-safe
|
||||
if not bcrypt.checkpw(password.encode(),
|
||||
user_hash):
|
||||
if not bcrypt.checkpw(password.encode(), user_hash):
|
||||
raise InvalidAuth
|
||||
|
||||
# pylint: disable=no-self-use
|
||||
def hash_password(self, password: str, for_storage: bool = False) -> bytes:
|
||||
"""Encode a password."""
|
||||
hashed = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12)) \
|
||||
# type: bytes
|
||||
hashed: bytes = bcrypt.hashpw(password.encode(), bcrypt.gensalt(rounds=12))
|
||||
|
||||
if for_storage:
|
||||
hashed = base64.b64encode(hashed)
|
||||
return hashed
|
||||
@@ -154,14 +154,17 @@ class Data:
|
||||
"""Add a new authenticated user/pass."""
|
||||
username = self.normalize_username(username)
|
||||
|
||||
if any(self.normalize_username(user['username']) == username
|
||||
for user in self.users):
|
||||
if any(
|
||||
self.normalize_username(user["username"]) == username for user in self.users
|
||||
):
|
||||
raise InvalidUser
|
||||
|
||||
self.users.append({
|
||||
'username': username,
|
||||
'password': self.hash_password(password, True).decode(),
|
||||
})
|
||||
self.users.append(
|
||||
{
|
||||
"username": username,
|
||||
"password": self.hash_password(password, True).decode(),
|
||||
}
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_remove_auth(self, username: str) -> None:
|
||||
@@ -170,7 +173,7 @@ class Data:
|
||||
|
||||
index = None
|
||||
for i, user in enumerate(self.users):
|
||||
if self.normalize_username(user['username']) == username:
|
||||
if self.normalize_username(user["username"]) == username:
|
||||
index = i
|
||||
break
|
||||
|
||||
@@ -187,9 +190,8 @@ class Data:
|
||||
username = self.normalize_username(username)
|
||||
|
||||
for user in self.users:
|
||||
if self.normalize_username(user['username']) == username:
|
||||
user['password'] = self.hash_password(
|
||||
new_password, True).decode()
|
||||
if self.normalize_username(user["username"]) == username:
|
||||
user["password"] = self.hash_password(new_password, True).decode()
|
||||
break
|
||||
else:
|
||||
raise InvalidUser
|
||||
@@ -199,11 +201,11 @@ class Data:
|
||||
await self._store.async_save(self._data)
|
||||
|
||||
|
||||
@AUTH_PROVIDERS.register('homeassistant')
|
||||
@AUTH_PROVIDERS.register("homeassistant")
|
||||
class HassAuthProvider(AuthProvider):
|
||||
"""Auth provider based on a local storage of users in HASS config dir."""
|
||||
|
||||
DEFAULT_TITLE = 'Home Assistant Local'
|
||||
DEFAULT_TITLE = "Home Assistant Local"
|
||||
|
||||
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
||||
"""Initialize an Home Assistant auth provider."""
|
||||
@@ -221,8 +223,7 @@ class HassAuthProvider(AuthProvider):
|
||||
await data.async_load()
|
||||
self.data = data
|
||||
|
||||
async def async_login_flow(
|
||||
self, context: Optional[Dict]) -> LoginFlow:
|
||||
async def async_login_flow(self, context: Optional[Dict]) -> LoginFlow:
|
||||
"""Return a flow to login."""
|
||||
return HassLoginFlow(self)
|
||||
|
||||
@@ -233,41 +234,41 @@ class HassAuthProvider(AuthProvider):
|
||||
assert self.data is not None
|
||||
|
||||
await self.hass.async_add_executor_job(
|
||||
self.data.validate_login, username, password)
|
||||
self.data.validate_login, username, password
|
||||
)
|
||||
|
||||
async def async_get_or_create_credentials(
|
||||
self, flow_result: Dict[str, str]) -> Credentials:
|
||||
self, flow_result: Dict[str, str]
|
||||
) -> Credentials:
|
||||
"""Get credentials based on the flow result."""
|
||||
if self.data is None:
|
||||
await self.async_initialize()
|
||||
assert self.data is not None
|
||||
|
||||
norm_username = self.data.normalize_username
|
||||
username = norm_username(flow_result['username'])
|
||||
username = norm_username(flow_result["username"])
|
||||
|
||||
for credential in await self.async_credentials():
|
||||
if norm_username(credential.data['username']) == username:
|
||||
if norm_username(credential.data["username"]) == username:
|
||||
return credential
|
||||
|
||||
# Create new credentials.
|
||||
return self.async_create_credentials({
|
||||
'username': username
|
||||
})
|
||||
return self.async_create_credentials({"username": username})
|
||||
|
||||
async def async_user_meta_for_credentials(
|
||||
self, credentials: Credentials) -> UserMeta:
|
||||
self, credentials: Credentials
|
||||
) -> UserMeta:
|
||||
"""Get extra info for this credential."""
|
||||
return UserMeta(name=credentials.data['username'], is_active=True)
|
||||
return UserMeta(name=credentials.data["username"], is_active=True)
|
||||
|
||||
async def async_will_remove_credentials(
|
||||
self, credentials: Credentials) -> None:
|
||||
async def async_will_remove_credentials(self, credentials: Credentials) -> None:
|
||||
"""When credentials get removed, also remove the auth."""
|
||||
if self.data is None:
|
||||
await self.async_initialize()
|
||||
assert self.data is not None
|
||||
|
||||
try:
|
||||
self.data.async_remove_auth(credentials.data['username'])
|
||||
self.data.async_remove_auth(credentials.data["username"])
|
||||
await self.data.async_save()
|
||||
except InvalidUser:
|
||||
# Can happen if somehow we didn't clean up a credential
|
||||
@@ -278,29 +279,27 @@ class HassLoginFlow(LoginFlow):
|
||||
"""Handler for the login flow."""
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: Optional[Dict[str, str]] = None) \
|
||||
-> Dict[str, Any]:
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle the step of the form."""
|
||||
errors = {}
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
await cast(HassAuthProvider, self._auth_provider)\
|
||||
.async_validate_login(user_input['username'],
|
||||
user_input['password'])
|
||||
await cast(HassAuthProvider, self._auth_provider).async_validate_login(
|
||||
user_input["username"], user_input["password"]
|
||||
)
|
||||
except InvalidAuth:
|
||||
errors['base'] = 'invalid_auth'
|
||||
errors["base"] = "invalid_auth"
|
||||
|
||||
if not errors:
|
||||
user_input.pop('password')
|
||||
user_input.pop("password")
|
||||
return await self.async_finish(user_input)
|
||||
|
||||
schema = OrderedDict() # type: Dict[str, type]
|
||||
schema['username'] = str
|
||||
schema['password'] = str
|
||||
schema["username"] = str
|
||||
schema["password"] = str
|
||||
|
||||
return self.async_show_form(
|
||||
step_id='init',
|
||||
data_schema=vol.Schema(schema),
|
||||
errors=errors,
|
||||
step_id="init", data_schema=vol.Schema(schema), errors=errors
|
||||
)
|
||||
|
||||
@@ -12,23 +12,25 @@ from . import AuthProvider, AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, LoginFlow
|
||||
from ..models import Credentials, UserMeta
|
||||
|
||||
|
||||
USER_SCHEMA = vol.Schema({
|
||||
vol.Required('username'): str,
|
||||
vol.Required('password'): str,
|
||||
vol.Optional('name'): str,
|
||||
})
|
||||
USER_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required("username"): str,
|
||||
vol.Required("password"): str,
|
||||
vol.Optional("name"): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend({
|
||||
vol.Required('users'): [USER_SCHEMA]
|
||||
}, extra=vol.PREVENT_EXTRA)
|
||||
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
|
||||
{vol.Required("users"): [USER_SCHEMA]}, extra=vol.PREVENT_EXTRA
|
||||
)
|
||||
|
||||
|
||||
class InvalidAuthError(HomeAssistantError):
|
||||
"""Raised when submitting invalid authentication."""
|
||||
|
||||
|
||||
@AUTH_PROVIDERS.register('insecure_example')
|
||||
@AUTH_PROVIDERS.register("insecure_example")
|
||||
class ExampleAuthProvider(AuthProvider):
|
||||
"""Example auth provider based on hardcoded usernames and passwords."""
|
||||
|
||||
@@ -42,47 +44,48 @@ class ExampleAuthProvider(AuthProvider):
|
||||
user = None
|
||||
|
||||
# Compare all users to avoid timing attacks.
|
||||
for usr in self.config['users']:
|
||||
if hmac.compare_digest(username.encode('utf-8'),
|
||||
usr['username'].encode('utf-8')):
|
||||
for usr in self.config["users"]:
|
||||
if hmac.compare_digest(
|
||||
username.encode("utf-8"), usr["username"].encode("utf-8")
|
||||
):
|
||||
user = usr
|
||||
|
||||
if user is None:
|
||||
# Do one more compare to make timing the same as if user was found.
|
||||
hmac.compare_digest(password.encode('utf-8'),
|
||||
password.encode('utf-8'))
|
||||
hmac.compare_digest(password.encode("utf-8"), password.encode("utf-8"))
|
||||
raise InvalidAuthError
|
||||
|
||||
if not hmac.compare_digest(user['password'].encode('utf-8'),
|
||||
password.encode('utf-8')):
|
||||
if not hmac.compare_digest(
|
||||
user["password"].encode("utf-8"), password.encode("utf-8")
|
||||
):
|
||||
raise InvalidAuthError
|
||||
|
||||
async def async_get_or_create_credentials(
|
||||
self, flow_result: Dict[str, str]) -> Credentials:
|
||||
self, flow_result: Dict[str, str]
|
||||
) -> Credentials:
|
||||
"""Get credentials based on the flow result."""
|
||||
username = flow_result['username']
|
||||
username = flow_result["username"]
|
||||
|
||||
for credential in await self.async_credentials():
|
||||
if credential.data['username'] == username:
|
||||
if credential.data["username"] == username:
|
||||
return credential
|
||||
|
||||
# Create new credentials.
|
||||
return self.async_create_credentials({
|
||||
'username': username
|
||||
})
|
||||
return self.async_create_credentials({"username": username})
|
||||
|
||||
async def async_user_meta_for_credentials(
|
||||
self, credentials: Credentials) -> UserMeta:
|
||||
self, credentials: Credentials
|
||||
) -> UserMeta:
|
||||
"""Return extra user metadata for credentials.
|
||||
|
||||
Will be used to populate info when creating a new user.
|
||||
"""
|
||||
username = credentials.data['username']
|
||||
username = credentials.data["username"]
|
||||
name = None
|
||||
|
||||
for user in self.config['users']:
|
||||
if user['username'] == username:
|
||||
name = user.get('name')
|
||||
for user in self.config["users"]:
|
||||
if user["username"] == username:
|
||||
name = user.get("name")
|
||||
break
|
||||
|
||||
return UserMeta(name=name, is_active=True)
|
||||
@@ -92,29 +95,27 @@ class ExampleLoginFlow(LoginFlow):
|
||||
"""Handler for the login flow."""
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: Optional[Dict[str, str]] = None) \
|
||||
-> Dict[str, Any]:
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle the step of the form."""
|
||||
errors = {}
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
cast(ExampleAuthProvider, self._auth_provider)\
|
||||
.async_validate_login(user_input['username'],
|
||||
user_input['password'])
|
||||
cast(ExampleAuthProvider, self._auth_provider).async_validate_login(
|
||||
user_input["username"], user_input["password"]
|
||||
)
|
||||
except InvalidAuthError:
|
||||
errors['base'] = 'invalid_auth'
|
||||
errors["base"] = "invalid_auth"
|
||||
|
||||
if not errors:
|
||||
user_input.pop('password')
|
||||
user_input.pop("password")
|
||||
return await self.async_finish(user_input)
|
||||
|
||||
schema = OrderedDict() # type: Dict[str, type]
|
||||
schema['username'] = str
|
||||
schema['password'] = str
|
||||
schema["username"] = str
|
||||
schema["password"] = str
|
||||
|
||||
return self.async_show_form(
|
||||
step_id='init',
|
||||
data_schema=vol.Schema(schema),
|
||||
errors=errors,
|
||||
step_id="init", data_schema=vol.Schema(schema), errors=errors
|
||||
)
|
||||
|
||||
@@ -16,27 +16,26 @@ from . import AuthProvider, AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, LoginFlow
|
||||
from .. import AuthManager
|
||||
from ..models import Credentials, UserMeta, User
|
||||
|
||||
AUTH_PROVIDER_TYPE = 'legacy_api_password'
|
||||
CONF_API_PASSWORD = 'api_password'
|
||||
AUTH_PROVIDER_TYPE = "legacy_api_password"
|
||||
CONF_API_PASSWORD = "api_password"
|
||||
|
||||
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend({
|
||||
vol.Required(CONF_API_PASSWORD): cv.string,
|
||||
}, extra=vol.PREVENT_EXTRA)
|
||||
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
|
||||
{vol.Required(CONF_API_PASSWORD): cv.string}, extra=vol.PREVENT_EXTRA
|
||||
)
|
||||
|
||||
LEGACY_USER_NAME = 'Legacy API password user'
|
||||
LEGACY_USER_NAME = "Legacy API password user"
|
||||
|
||||
|
||||
class InvalidAuthError(HomeAssistantError):
|
||||
"""Raised when submitting invalid authentication."""
|
||||
|
||||
|
||||
async def async_validate_password(hass: HomeAssistant, password: str)\
|
||||
-> Optional[User]:
|
||||
async def async_validate_password(hass: HomeAssistant, password: str) -> Optional[User]:
|
||||
"""Return a user if password is valid. None if not."""
|
||||
auth = cast(AuthManager, hass.auth) # type: ignore
|
||||
providers = auth.get_auth_providers(AUTH_PROVIDER_TYPE)
|
||||
if not providers:
|
||||
raise ValueError('Legacy API password provider not found')
|
||||
raise ValueError("Legacy API password provider not found")
|
||||
|
||||
try:
|
||||
provider = cast(LegacyApiPasswordAuthProvider, providers[0])
|
||||
@@ -52,7 +51,7 @@ async def async_validate_password(hass: HomeAssistant, password: str)\
|
||||
class LegacyApiPasswordAuthProvider(AuthProvider):
|
||||
"""An auth provider support legacy api_password."""
|
||||
|
||||
DEFAULT_TITLE = 'Legacy API Password'
|
||||
DEFAULT_TITLE = "Legacy API Password"
|
||||
|
||||
@property
|
||||
def api_password(self) -> str:
|
||||
@@ -68,12 +67,14 @@ class LegacyApiPasswordAuthProvider(AuthProvider):
|
||||
"""Validate password."""
|
||||
api_password = str(self.config[CONF_API_PASSWORD])
|
||||
|
||||
if not hmac.compare_digest(api_password.encode('utf-8'),
|
||||
password.encode('utf-8')):
|
||||
if not hmac.compare_digest(
|
||||
api_password.encode("utf-8"), password.encode("utf-8")
|
||||
):
|
||||
raise InvalidAuthError
|
||||
|
||||
async def async_get_or_create_credentials(
|
||||
self, flow_result: Dict[str, str]) -> Credentials:
|
||||
self, flow_result: Dict[str, str]
|
||||
) -> Credentials:
|
||||
"""Return credentials for this login."""
|
||||
credentials = await self.async_credentials()
|
||||
if credentials:
|
||||
@@ -82,7 +83,8 @@ class LegacyApiPasswordAuthProvider(AuthProvider):
|
||||
return self.async_create_credentials({})
|
||||
|
||||
async def async_user_meta_for_credentials(
|
||||
self, credentials: Credentials) -> UserMeta:
|
||||
self, credentials: Credentials
|
||||
) -> UserMeta:
|
||||
"""
|
||||
Return info for the user.
|
||||
|
||||
@@ -95,23 +97,22 @@ class LegacyLoginFlow(LoginFlow):
|
||||
"""Handler for the login flow."""
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: Optional[Dict[str, str]] = None) \
|
||||
-> Dict[str, Any]:
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle the step of the form."""
|
||||
errors = {}
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
cast(LegacyApiPasswordAuthProvider, self._auth_provider)\
|
||||
.async_validate_login(user_input['password'])
|
||||
cast(
|
||||
LegacyApiPasswordAuthProvider, self._auth_provider
|
||||
).async_validate_login(user_input["password"])
|
||||
except InvalidAuthError:
|
||||
errors['base'] = 'invalid_auth'
|
||||
errors["base"] = "invalid_auth"
|
||||
|
||||
if not errors:
|
||||
return await self.async_finish({})
|
||||
|
||||
return self.async_show_form(
|
||||
step_id='init',
|
||||
data_schema=vol.Schema({'password': str}),
|
||||
errors=errors,
|
||||
step_id="init", data_schema=vol.Schema({"password": str}), errors=errors
|
||||
)
|
||||
|
||||
@@ -3,8 +3,7 @@
|
||||
It shows list of users if access from trusted network.
|
||||
Abort login flow if not access from trusted network.
|
||||
"""
|
||||
from ipaddress import ip_network, IPv4Address, IPv6Address, IPv4Network,\
|
||||
IPv6Network
|
||||
from ipaddress import ip_network, IPv4Address, IPv6Address, IPv4Network, IPv6Network
|
||||
from typing import Any, Dict, List, Optional, Union, cast
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -18,27 +17,32 @@ from ..models import Credentials, UserMeta
|
||||
IPAddress = Union[IPv4Address, IPv6Address]
|
||||
IPNetwork = Union[IPv4Network, IPv6Network]
|
||||
|
||||
CONF_TRUSTED_NETWORKS = 'trusted_networks'
|
||||
CONF_TRUSTED_USERS = 'trusted_users'
|
||||
CONF_GROUP = 'group'
|
||||
CONF_ALLOW_BYPASS_LOGIN = 'allow_bypass_login'
|
||||
CONF_TRUSTED_NETWORKS = "trusted_networks"
|
||||
CONF_TRUSTED_USERS = "trusted_users"
|
||||
CONF_GROUP = "group"
|
||||
CONF_ALLOW_BYPASS_LOGIN = "allow_bypass_login"
|
||||
|
||||
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend({
|
||||
vol.Required(CONF_TRUSTED_NETWORKS): vol.All(
|
||||
cv.ensure_list, [ip_network]
|
||||
),
|
||||
vol.Optional(CONF_TRUSTED_USERS, default={}): vol.Schema(
|
||||
# we only validate the format of user_id or group_id
|
||||
{ip_network: vol.All(
|
||||
cv.ensure_list,
|
||||
[vol.Or(
|
||||
cv.uuid4_hex,
|
||||
vol.Schema({vol.Required(CONF_GROUP): cv.uuid4_hex}),
|
||||
)],
|
||||
)}
|
||||
),
|
||||
vol.Optional(CONF_ALLOW_BYPASS_LOGIN, default=False): cv.boolean,
|
||||
}, extra=vol.PREVENT_EXTRA)
|
||||
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_TRUSTED_NETWORKS): vol.All(cv.ensure_list, [ip_network]),
|
||||
vol.Optional(CONF_TRUSTED_USERS, default={}): vol.Schema(
|
||||
# we only validate the format of user_id or group_id
|
||||
{
|
||||
ip_network: vol.All(
|
||||
cv.ensure_list,
|
||||
[
|
||||
vol.Or(
|
||||
cv.uuid4_hex,
|
||||
vol.Schema({vol.Required(CONF_GROUP): cv.uuid4_hex}),
|
||||
)
|
||||
],
|
||||
)
|
||||
}
|
||||
),
|
||||
vol.Optional(CONF_ALLOW_BYPASS_LOGIN, default=False): cv.boolean,
|
||||
},
|
||||
extra=vol.PREVENT_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
class InvalidAuthError(HomeAssistantError):
|
||||
@@ -49,14 +53,14 @@ class InvalidUserError(HomeAssistantError):
|
||||
"""Raised when try to login as invalid user."""
|
||||
|
||||
|
||||
@AUTH_PROVIDERS.register('trusted_networks')
|
||||
@AUTH_PROVIDERS.register("trusted_networks")
|
||||
class TrustedNetworksAuthProvider(AuthProvider):
|
||||
"""Trusted Networks auth provider.
|
||||
|
||||
Allow passwordless access from trusted network.
|
||||
"""
|
||||
|
||||
DEFAULT_TITLE = 'Trusted Networks'
|
||||
DEFAULT_TITLE = "Trusted Networks"
|
||||
|
||||
@property
|
||||
def trusted_networks(self) -> List[IPNetwork]:
|
||||
@@ -76,49 +80,58 @@ class TrustedNetworksAuthProvider(AuthProvider):
|
||||
async def async_login_flow(self, context: Optional[Dict]) -> LoginFlow:
|
||||
"""Return a flow to login."""
|
||||
assert context is not None
|
||||
ip_addr = cast(IPAddress, context.get('ip_address'))
|
||||
ip_addr = cast(IPAddress, context.get("ip_address"))
|
||||
users = await self.store.async_get_users()
|
||||
available_users = [user for user in users
|
||||
if not user.system_generated and user.is_active]
|
||||
available_users = [
|
||||
user for user in users if not user.system_generated and user.is_active
|
||||
]
|
||||
for ip_net, user_or_group_list in self.trusted_users.items():
|
||||
if ip_addr in ip_net:
|
||||
user_list = [user_id for user_id in user_or_group_list
|
||||
if isinstance(user_id, str)]
|
||||
group_list = [group[CONF_GROUP] for group in user_or_group_list
|
||||
if isinstance(group, dict)]
|
||||
flattened_group_list = [group for sublist in group_list
|
||||
for group in sublist]
|
||||
user_list = [
|
||||
user_id
|
||||
for user_id in user_or_group_list
|
||||
if isinstance(user_id, str)
|
||||
]
|
||||
group_list = [
|
||||
group[CONF_GROUP]
|
||||
for group in user_or_group_list
|
||||
if isinstance(group, dict)
|
||||
]
|
||||
flattened_group_list = [
|
||||
group for sublist in group_list for group in sublist
|
||||
]
|
||||
available_users = [
|
||||
user for user in available_users
|
||||
if (user.id in user_list or
|
||||
any([group.id in flattened_group_list
|
||||
for group in user.groups]))
|
||||
user
|
||||
for user in available_users
|
||||
if (
|
||||
user.id in user_list
|
||||
or any(
|
||||
[group.id in flattened_group_list for group in user.groups]
|
||||
)
|
||||
)
|
||||
]
|
||||
break
|
||||
|
||||
return TrustedNetworksLoginFlow(
|
||||
self,
|
||||
ip_addr,
|
||||
{
|
||||
user.id: user.name for user in available_users
|
||||
},
|
||||
{user.id: user.name for user in available_users},
|
||||
self.config[CONF_ALLOW_BYPASS_LOGIN],
|
||||
)
|
||||
|
||||
async def async_get_or_create_credentials(
|
||||
self, flow_result: Dict[str, str]) -> Credentials:
|
||||
self, flow_result: Dict[str, str]
|
||||
) -> Credentials:
|
||||
"""Get credentials based on the flow result."""
|
||||
user_id = flow_result['user']
|
||||
user_id = flow_result["user"]
|
||||
|
||||
users = await self.store.async_get_users()
|
||||
for user in users:
|
||||
if (not user.system_generated and
|
||||
user.is_active and
|
||||
user.id == user_id):
|
||||
if not user.system_generated and user.is_active and user.id == user_id:
|
||||
for credential in await self.async_credentials():
|
||||
if credential.data['user_id'] == user_id:
|
||||
if credential.data["user_id"] == user_id:
|
||||
return credential
|
||||
cred = self.async_create_credentials({'user_id': user_id})
|
||||
cred = self.async_create_credentials({"user_id": user_id})
|
||||
await self.store.async_link_user(user, cred)
|
||||
return cred
|
||||
|
||||
@@ -126,7 +139,8 @@ class TrustedNetworksAuthProvider(AuthProvider):
|
||||
raise InvalidUserError
|
||||
|
||||
async def async_user_meta_for_credentials(
|
||||
self, credentials: Credentials) -> UserMeta:
|
||||
self, credentials: Credentials
|
||||
) -> UserMeta:
|
||||
"""Return extra user metadata for credentials.
|
||||
|
||||
Trusted network auth provider should never create new user.
|
||||
@@ -141,20 +155,24 @@ class TrustedNetworksAuthProvider(AuthProvider):
|
||||
Raise InvalidAuthError if trusted_networks is not configured.
|
||||
"""
|
||||
if not self.trusted_networks:
|
||||
raise InvalidAuthError('trusted_networks is not configured')
|
||||
raise InvalidAuthError("trusted_networks is not configured")
|
||||
|
||||
if not any(ip_addr in trusted_network for trusted_network
|
||||
in self.trusted_networks):
|
||||
raise InvalidAuthError('Not in trusted_networks')
|
||||
if not any(
|
||||
ip_addr in trusted_network for trusted_network in self.trusted_networks
|
||||
):
|
||||
raise InvalidAuthError("Not in trusted_networks")
|
||||
|
||||
|
||||
class TrustedNetworksLoginFlow(LoginFlow):
|
||||
"""Handler for the login flow."""
|
||||
|
||||
def __init__(self, auth_provider: TrustedNetworksAuthProvider,
|
||||
ip_addr: IPAddress,
|
||||
available_users: Dict[str, Optional[str]],
|
||||
allow_bypass_login: bool) -> None:
|
||||
def __init__(
|
||||
self,
|
||||
auth_provider: TrustedNetworksAuthProvider,
|
||||
ip_addr: IPAddress,
|
||||
available_users: Dict[str, Optional[str]],
|
||||
allow_bypass_login: bool,
|
||||
) -> None:
|
||||
"""Initialize the login flow."""
|
||||
super().__init__(auth_provider)
|
||||
self._available_users = available_users
|
||||
@@ -162,27 +180,26 @@ class TrustedNetworksLoginFlow(LoginFlow):
|
||||
self._allow_bypass_login = allow_bypass_login
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: Optional[Dict[str, str]] = None) \
|
||||
-> Dict[str, Any]:
|
||||
self, user_input: Optional[Dict[str, str]] = None
|
||||
) -> Dict[str, Any]:
|
||||
"""Handle the step of the form."""
|
||||
try:
|
||||
cast(TrustedNetworksAuthProvider, self._auth_provider)\
|
||||
.async_validate_access(self._ip_address)
|
||||
cast(
|
||||
TrustedNetworksAuthProvider, self._auth_provider
|
||||
).async_validate_access(self._ip_address)
|
||||
|
||||
except InvalidAuthError:
|
||||
return self.async_abort(
|
||||
reason='not_whitelisted'
|
||||
)
|
||||
return self.async_abort(reason="not_whitelisted")
|
||||
|
||||
if user_input is not None:
|
||||
return await self.async_finish(user_input)
|
||||
|
||||
if self._allow_bypass_login and len(self._available_users) == 1:
|
||||
return await self.async_finish({
|
||||
'user': next(iter(self._available_users.keys()))
|
||||
})
|
||||
return await self.async_finish(
|
||||
{"user": next(iter(self._available_users.keys()))}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id='init',
|
||||
data_schema=vol.Schema({'user': vol.In(self._available_users)}),
|
||||
step_id="init",
|
||||
data_schema=vol.Schema({"user": vol.In(self._available_users)}),
|
||||
)
|
||||
|
||||
@@ -10,4 +10,4 @@ def generate_secret(entropy: int = 32) -> str:
|
||||
|
||||
Event loop friendly.
|
||||
"""
|
||||
return binascii.hexlify(os.urandom(entropy)).decode('ascii')
|
||||
return binascii.hexlify(os.urandom(entropy)).decode("ascii")
|
||||
|
||||
@@ -17,36 +17,36 @@ from homeassistant.util.logging import AsyncHandler
|
||||
from homeassistant.util.package import async_get_user_site, is_virtual_env
|
||||
from homeassistant.util.yaml import clear_secret_cache
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ERROR_LOG_FILENAME = 'home-assistant.log'
|
||||
ERROR_LOG_FILENAME = "home-assistant.log"
|
||||
|
||||
# hass.data key for logging information.
|
||||
DATA_LOGGING = 'logging'
|
||||
DATA_LOGGING = "logging"
|
||||
|
||||
DEBUGGER_INTEGRATIONS = {'ptvsd', }
|
||||
CORE_INTEGRATIONS = ('homeassistant', 'persistent_notification')
|
||||
LOGGING_INTEGRATIONS = {'logger', 'system_log'}
|
||||
DEBUGGER_INTEGRATIONS = {"ptvsd"}
|
||||
CORE_INTEGRATIONS = ("homeassistant", "persistent_notification")
|
||||
LOGGING_INTEGRATIONS = {"logger", "system_log"}
|
||||
STAGE_1_INTEGRATIONS = {
|
||||
# To record data
|
||||
'recorder',
|
||||
"recorder",
|
||||
# To make sure we forward data to other instances
|
||||
'mqtt_eventstream',
|
||||
"mqtt_eventstream",
|
||||
}
|
||||
|
||||
|
||||
async def async_from_config_dict(config: Dict[str, Any],
|
||||
hass: core.HomeAssistant,
|
||||
config_dir: Optional[str] = None,
|
||||
enable_log: bool = True,
|
||||
verbose: bool = False,
|
||||
skip_pip: bool = False,
|
||||
log_rotate_days: Any = None,
|
||||
log_file: Any = None,
|
||||
log_no_color: bool = False) \
|
||||
-> Optional[core.HomeAssistant]:
|
||||
async def async_from_config_dict(
|
||||
config: Dict[str, Any],
|
||||
hass: core.HomeAssistant,
|
||||
config_dir: Optional[str] = None,
|
||||
enable_log: bool = True,
|
||||
verbose: bool = False,
|
||||
skip_pip: bool = False,
|
||||
log_rotate_days: Any = None,
|
||||
log_file: Any = None,
|
||||
log_no_color: bool = False,
|
||||
) -> Optional[core.HomeAssistant]:
|
||||
"""Try to configure Home Assistant from a configuration dictionary.
|
||||
|
||||
Dynamically loads required components and its dependencies.
|
||||
@@ -55,28 +55,30 @@ async def async_from_config_dict(config: Dict[str, Any],
|
||||
start = time()
|
||||
|
||||
if enable_log:
|
||||
async_enable_logging(hass, verbose, log_rotate_days, log_file,
|
||||
log_no_color)
|
||||
async_enable_logging(hass, verbose, log_rotate_days, log_file, log_no_color)
|
||||
|
||||
hass.config.skip_pip = skip_pip
|
||||
if skip_pip:
|
||||
_LOGGER.warning("Skipping pip installation of required modules. "
|
||||
"This may cause issues")
|
||||
_LOGGER.warning(
|
||||
"Skipping pip installation of required modules. " "This may cause issues"
|
||||
)
|
||||
|
||||
core_config = config.get(core.DOMAIN, {})
|
||||
api_password = config.get('http', {}).get('api_password')
|
||||
trusted_networks = config.get('http', {}).get('trusted_networks')
|
||||
api_password = config.get("http", {}).get("api_password")
|
||||
trusted_networks = config.get("http", {}).get("trusted_networks")
|
||||
|
||||
try:
|
||||
await conf_util.async_process_ha_core_config(
|
||||
hass, core_config, api_password, trusted_networks)
|
||||
hass, core_config, api_password, trusted_networks
|
||||
)
|
||||
except vol.Invalid as config_err:
|
||||
conf_util.async_log_exception(
|
||||
config_err, 'homeassistant', core_config, hass)
|
||||
conf_util.async_log_exception(config_err, "homeassistant", core_config, hass)
|
||||
return None
|
||||
except HomeAssistantError:
|
||||
_LOGGER.error("Home Assistant core failed to initialize. "
|
||||
"Further initialization aborted")
|
||||
_LOGGER.error(
|
||||
"Home Assistant core failed to initialize. "
|
||||
"Further initialization aborted"
|
||||
)
|
||||
return None
|
||||
|
||||
# Make a copy because we are mutating it.
|
||||
@@ -84,7 +86,8 @@ async def async_from_config_dict(config: Dict[str, Any],
|
||||
|
||||
# Merge packages
|
||||
await conf_util.merge_packages_config(
|
||||
hass, config, core_config.get(conf_util.CONF_PACKAGES, {}))
|
||||
hass, config, core_config.get(conf_util.CONF_PACKAGES, {})
|
||||
)
|
||||
|
||||
hass.config_entries = config_entries.ConfigEntries(hass, config)
|
||||
await hass.config_entries.async_initialize()
|
||||
@@ -92,64 +95,20 @@ async def async_from_config_dict(config: Dict[str, Any],
|
||||
await _async_set_up_integrations(hass, config)
|
||||
|
||||
stop = time()
|
||||
_LOGGER.info("Home Assistant initialized in %.2fs", stop-start)
|
||||
|
||||
# TEMP: warn users for invalid slugs
|
||||
# Remove after 0.94 or 1.0
|
||||
if cv.INVALID_SLUGS_FOUND or cv.INVALID_ENTITY_IDS_FOUND:
|
||||
msg = []
|
||||
|
||||
if cv.INVALID_ENTITY_IDS_FOUND:
|
||||
msg.append(
|
||||
"Your configuration contains invalid entity ID references. "
|
||||
"Please find and update the following. "
|
||||
"This will become a breaking change."
|
||||
)
|
||||
msg.append('\n'.join('- {} -> {}'.format(*item)
|
||||
for item
|
||||
in cv.INVALID_ENTITY_IDS_FOUND.items()))
|
||||
|
||||
if cv.INVALID_SLUGS_FOUND:
|
||||
msg.append(
|
||||
"Your configuration contains invalid slugs. "
|
||||
"Please find and update the following. "
|
||||
"This will become a breaking change."
|
||||
)
|
||||
msg.append('\n'.join('- {} -> {}'.format(*item)
|
||||
for item in cv.INVALID_SLUGS_FOUND.items()))
|
||||
|
||||
hass.components.persistent_notification.async_create(
|
||||
'\n\n'.join(msg), "Config Warning", "config_warning"
|
||||
)
|
||||
|
||||
# TEMP: warn users of invalid extra keys
|
||||
# Remove after 0.92
|
||||
if cv.INVALID_EXTRA_KEYS_FOUND:
|
||||
msg = []
|
||||
msg.append(
|
||||
"Your configuration contains extra keys "
|
||||
"that the platform does not support (but were silently "
|
||||
"accepted before 0.88). Please find and remove the following."
|
||||
"This will become a breaking change."
|
||||
)
|
||||
msg.append('\n'.join('- {}'.format(it)
|
||||
for it in cv.INVALID_EXTRA_KEYS_FOUND))
|
||||
|
||||
hass.components.persistent_notification.async_create(
|
||||
'\n\n'.join(msg), "Config Warning", "config_warning"
|
||||
)
|
||||
_LOGGER.info("Home Assistant initialized in %.2fs", stop - start)
|
||||
|
||||
return hass
|
||||
|
||||
|
||||
async def async_from_config_file(config_path: str,
|
||||
hass: core.HomeAssistant,
|
||||
verbose: bool = False,
|
||||
skip_pip: bool = True,
|
||||
log_rotate_days: Any = None,
|
||||
log_file: Any = None,
|
||||
log_no_color: bool = False)\
|
||||
-> Optional[core.HomeAssistant]:
|
||||
async def async_from_config_file(
|
||||
config_path: str,
|
||||
hass: core.HomeAssistant,
|
||||
verbose: bool = False,
|
||||
skip_pip: bool = True,
|
||||
log_rotate_days: Any = None,
|
||||
log_file: Any = None,
|
||||
log_no_color: bool = False,
|
||||
) -> Optional[core.HomeAssistant]:
|
||||
"""Read the configuration file and try to start all the functionality.
|
||||
|
||||
Will add functionality to 'hass' parameter.
|
||||
@@ -162,15 +121,14 @@ async def async_from_config_file(config_path: str,
|
||||
if not is_virtual_env():
|
||||
await async_mount_local_lib_path(config_dir)
|
||||
|
||||
async_enable_logging(hass, verbose, log_rotate_days, log_file,
|
||||
log_no_color)
|
||||
async_enable_logging(hass, verbose, log_rotate_days, log_file, log_no_color)
|
||||
|
||||
await hass.async_add_executor_job(
|
||||
conf_util.process_ha_config_upgrade, hass)
|
||||
await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)
|
||||
|
||||
try:
|
||||
config_dict = await hass.async_add_executor_job(
|
||||
conf_util.load_yaml_config_file, config_path)
|
||||
conf_util.load_yaml_config_file, config_path
|
||||
)
|
||||
except HomeAssistantError as err:
|
||||
_LOGGER.error("Error loading %s: %s", config_path, err)
|
||||
return None
|
||||
@@ -178,43 +136,48 @@ async def async_from_config_file(config_path: str,
|
||||
clear_secret_cache()
|
||||
|
||||
return await async_from_config_dict(
|
||||
config_dict, hass, enable_log=False, skip_pip=skip_pip)
|
||||
config_dict, hass, enable_log=False, skip_pip=skip_pip
|
||||
)
|
||||
|
||||
|
||||
@core.callback
|
||||
def async_enable_logging(hass: core.HomeAssistant,
|
||||
verbose: bool = False,
|
||||
log_rotate_days: Optional[int] = None,
|
||||
log_file: Optional[str] = None,
|
||||
log_no_color: bool = False) -> None:
|
||||
def async_enable_logging(
|
||||
hass: core.HomeAssistant,
|
||||
verbose: bool = False,
|
||||
log_rotate_days: Optional[int] = None,
|
||||
log_file: Optional[str] = None,
|
||||
log_no_color: bool = False,
|
||||
) -> None:
|
||||
"""Set up the logging.
|
||||
|
||||
This method must be run in the event loop.
|
||||
"""
|
||||
fmt = ("%(asctime)s %(levelname)s (%(threadName)s) "
|
||||
"[%(name)s] %(message)s")
|
||||
datefmt = '%Y-%m-%d %H:%M:%S'
|
||||
fmt = "%(asctime)s %(levelname)s (%(threadName)s) " "[%(name)s] %(message)s"
|
||||
datefmt = "%Y-%m-%d %H:%M:%S"
|
||||
|
||||
if not log_no_color:
|
||||
try:
|
||||
from colorlog import ColoredFormatter
|
||||
|
||||
# basicConfig must be called after importing colorlog in order to
|
||||
# ensure that the handlers it sets up wraps the correct streams.
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
|
||||
colorfmt = "%(log_color)s{}%(reset)s".format(fmt)
|
||||
logging.getLogger().handlers[0].setFormatter(ColoredFormatter(
|
||||
colorfmt,
|
||||
datefmt=datefmt,
|
||||
reset=True,
|
||||
log_colors={
|
||||
'DEBUG': 'cyan',
|
||||
'INFO': 'green',
|
||||
'WARNING': 'yellow',
|
||||
'ERROR': 'red',
|
||||
'CRITICAL': 'red',
|
||||
}
|
||||
))
|
||||
logging.getLogger().handlers[0].setFormatter(
|
||||
ColoredFormatter(
|
||||
colorfmt,
|
||||
datefmt=datefmt,
|
||||
reset=True,
|
||||
log_colors={
|
||||
"DEBUG": "cyan",
|
||||
"INFO": "green",
|
||||
"WARNING": "yellow",
|
||||
"ERROR": "red",
|
||||
"CRITICAL": "red",
|
||||
},
|
||||
)
|
||||
)
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
@@ -223,9 +186,9 @@ def async_enable_logging(hass: core.HomeAssistant,
|
||||
logging.basicConfig(format=fmt, datefmt=datefmt, level=logging.INFO)
|
||||
|
||||
# Suppress overly verbose logs from libraries that aren't helpful
|
||||
logging.getLogger('requests').setLevel(logging.WARNING)
|
||||
logging.getLogger('urllib3').setLevel(logging.WARNING)
|
||||
logging.getLogger('aiohttp.access').setLevel(logging.WARNING)
|
||||
logging.getLogger("requests").setLevel(logging.WARNING)
|
||||
logging.getLogger("urllib3").setLevel(logging.WARNING)
|
||||
logging.getLogger("aiohttp.access").setLevel(logging.WARNING)
|
||||
|
||||
# Log errors to a file if we have write access to file or config dir
|
||||
if log_file is None:
|
||||
@@ -238,16 +201,16 @@ def async_enable_logging(hass: core.HomeAssistant,
|
||||
|
||||
# Check if we can write to the error log if it exists or that
|
||||
# we can create files in the containing directory if not.
|
||||
if (err_path_exists and os.access(err_log_path, os.W_OK)) or \
|
||||
(not err_path_exists and os.access(err_dir, os.W_OK)):
|
||||
if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
|
||||
not err_path_exists and os.access(err_dir, os.W_OK)
|
||||
):
|
||||
|
||||
if log_rotate_days:
|
||||
err_handler = logging.handlers.TimedRotatingFileHandler(
|
||||
err_log_path, when='midnight',
|
||||
backupCount=log_rotate_days) # type: logging.FileHandler
|
||||
err_log_path, when="midnight", backupCount=log_rotate_days
|
||||
) # type: logging.FileHandler
|
||||
else:
|
||||
err_handler = logging.FileHandler(
|
||||
err_log_path, mode='w', delay=True)
|
||||
err_handler = logging.FileHandler(err_log_path, mode="w", delay=True)
|
||||
|
||||
err_handler.setLevel(logging.INFO if verbose else logging.WARNING)
|
||||
err_handler.setFormatter(logging.Formatter(fmt, datefmt=datefmt))
|
||||
@@ -256,21 +219,19 @@ def async_enable_logging(hass: core.HomeAssistant,
|
||||
|
||||
async def async_stop_async_handler(_: Any) -> None:
|
||||
"""Cleanup async handler."""
|
||||
logging.getLogger('').removeHandler(async_handler) # type: ignore
|
||||
logging.getLogger("").removeHandler(async_handler) # type: ignore
|
||||
await async_handler.async_close(blocking=True)
|
||||
|
||||
hass.bus.async_listen_once(
|
||||
EVENT_HOMEASSISTANT_CLOSE, async_stop_async_handler)
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, async_stop_async_handler)
|
||||
|
||||
logger = logging.getLogger('')
|
||||
logger = logging.getLogger("")
|
||||
logger.addHandler(async_handler) # type: ignore
|
||||
logger.setLevel(logging.INFO)
|
||||
|
||||
# Save the log file location for access by other components.
|
||||
hass.data[DATA_LOGGING] = err_log_path
|
||||
else:
|
||||
_LOGGER.error(
|
||||
"Unable to set up error log %s (access denied)", err_log_path)
|
||||
_LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
|
||||
|
||||
|
||||
async def async_mount_local_lib_path(config_dir: str) -> str:
|
||||
@@ -278,7 +239,7 @@ async def async_mount_local_lib_path(config_dir: str) -> str:
|
||||
|
||||
This function is a coroutine.
|
||||
"""
|
||||
deps_dir = os.path.join(config_dir, 'deps')
|
||||
deps_dir = os.path.join(config_dir, "deps")
|
||||
lib_dir = await async_get_user_site(deps_dir)
|
||||
if lib_dir not in sys.path:
|
||||
sys.path.insert(0, lib_dir)
|
||||
@@ -289,21 +250,21 @@ async def async_mount_local_lib_path(config_dir: str) -> str:
|
||||
def _get_domains(hass: core.HomeAssistant, config: Dict[str, Any]) -> Set[str]:
|
||||
"""Get domains of components to set up."""
|
||||
# Filter out the repeating and common config section [homeassistant]
|
||||
domains = set(key.split(' ')[0] for key in config.keys()
|
||||
if key != core.DOMAIN)
|
||||
domains = set(key.split(" ")[0] for key in config.keys() if key != core.DOMAIN)
|
||||
|
||||
# Add config entry domains
|
||||
domains.update(hass.config_entries.async_domains()) # type: ignore
|
||||
|
||||
# Make sure the Hass.io component is loaded
|
||||
if 'HASSIO' in os.environ:
|
||||
domains.add('hassio')
|
||||
if "HASSIO" in os.environ:
|
||||
domains.add("hassio")
|
||||
|
||||
return domains
|
||||
|
||||
|
||||
async def _async_set_up_integrations(
|
||||
hass: core.HomeAssistant, config: Dict[str, Any]) -> None:
|
||||
hass: core.HomeAssistant, config: Dict[str, Any]
|
||||
) -> None:
|
||||
"""Set up all the integrations."""
|
||||
domains = _get_domains(hass, config)
|
||||
|
||||
@@ -311,27 +272,33 @@ async def _async_set_up_integrations(
|
||||
debuggers = domains & DEBUGGER_INTEGRATIONS
|
||||
if debuggers:
|
||||
_LOGGER.debug("Starting up debuggers %s", debuggers)
|
||||
await asyncio.gather(*[
|
||||
async_setup_component(hass, domain, config)
|
||||
for domain in debuggers])
|
||||
await asyncio.gather(
|
||||
*(async_setup_component(hass, domain, config) for domain in debuggers)
|
||||
)
|
||||
domains -= DEBUGGER_INTEGRATIONS
|
||||
|
||||
# Resolve all dependencies of all components so we can find the logging
|
||||
# and integrations that need faster initialization.
|
||||
resolved_domains_task = asyncio.gather(*[
|
||||
loader.async_component_dependencies(hass, domain)
|
||||
for domain in domains
|
||||
], return_exceptions=True)
|
||||
resolved_domains_task = asyncio.gather(
|
||||
*(loader.async_component_dependencies(hass, domain) for domain in domains),
|
||||
return_exceptions=True,
|
||||
)
|
||||
|
||||
# Set up core.
|
||||
_LOGGER.debug("Setting up %s", CORE_INTEGRATIONS)
|
||||
|
||||
if not all(await asyncio.gather(*[
|
||||
async_setup_component(hass, domain, config)
|
||||
for domain in CORE_INTEGRATIONS
|
||||
])):
|
||||
_LOGGER.error("Home Assistant core failed to initialize. "
|
||||
"Further initialization aborted")
|
||||
if not all(
|
||||
await asyncio.gather(
|
||||
*(
|
||||
async_setup_component(hass, domain, config)
|
||||
for domain in CORE_INTEGRATIONS
|
||||
)
|
||||
)
|
||||
):
|
||||
_LOGGER.error(
|
||||
"Home Assistant core failed to initialize. "
|
||||
"Further initialization aborted"
|
||||
)
|
||||
return
|
||||
|
||||
_LOGGER.debug("Home Assistant core initialized")
|
||||
@@ -351,36 +318,32 @@ async def _async_set_up_integrations(
|
||||
if logging_domains:
|
||||
_LOGGER.info("Setting up %s", logging_domains)
|
||||
|
||||
await asyncio.gather(*[
|
||||
async_setup_component(hass, domain, config)
|
||||
for domain in logging_domains
|
||||
])
|
||||
await asyncio.gather(
|
||||
*(async_setup_component(hass, domain, config) for domain in logging_domains)
|
||||
)
|
||||
|
||||
# Kick off loading the registries. They don't need to be awaited.
|
||||
asyncio.gather(
|
||||
hass.helpers.device_registry.async_get_registry(),
|
||||
hass.helpers.entity_registry.async_get_registry(),
|
||||
hass.helpers.area_registry.async_get_registry())
|
||||
hass.helpers.area_registry.async_get_registry(),
|
||||
)
|
||||
|
||||
if stage_1_domains:
|
||||
await asyncio.gather(*[
|
||||
async_setup_component(hass, domain, config)
|
||||
for domain in stage_1_domains
|
||||
])
|
||||
await asyncio.gather(
|
||||
*(async_setup_component(hass, domain, config) for domain in stage_1_domains)
|
||||
)
|
||||
|
||||
# Load all integrations
|
||||
after_dependencies = {} # type: Dict[str, Set[str]]
|
||||
|
||||
for int_or_exc in await asyncio.gather(*[
|
||||
loader.async_get_integration(hass, domain)
|
||||
for domain in stage_2_domains
|
||||
], return_exceptions=True):
|
||||
for int_or_exc in await asyncio.gather(
|
||||
*(loader.async_get_integration(hass, domain) for domain in stage_2_domains),
|
||||
return_exceptions=True,
|
||||
):
|
||||
# Exceptions are handled in async_setup_component.
|
||||
if (isinstance(int_or_exc, loader.Integration) and
|
||||
int_or_exc.after_dependencies):
|
||||
after_dependencies[int_or_exc.domain] = set(
|
||||
int_or_exc.after_dependencies
|
||||
)
|
||||
if isinstance(int_or_exc, loader.Integration) and int_or_exc.after_dependencies:
|
||||
after_dependencies[int_or_exc.domain] = set(int_or_exc.after_dependencies)
|
||||
|
||||
last_load = None
|
||||
while stage_2_domains:
|
||||
@@ -390,8 +353,7 @@ async def _async_set_up_integrations(
|
||||
after_deps = after_dependencies.get(domain)
|
||||
# Load if integration has no after_dependencies or they are
|
||||
# all loaded
|
||||
if (not after_deps or
|
||||
not after_deps-hass.config.components):
|
||||
if not after_deps or not after_deps - hass.config.components:
|
||||
domains_to_load.add(domain)
|
||||
|
||||
if not domains_to_load or domains_to_load == last_load:
|
||||
@@ -399,10 +361,9 @@ async def _async_set_up_integrations(
|
||||
|
||||
_LOGGER.debug("Setting up %s", domains_to_load)
|
||||
|
||||
await asyncio.gather(*[
|
||||
async_setup_component(hass, domain, config)
|
||||
for domain in domains_to_load
|
||||
])
|
||||
await asyncio.gather(
|
||||
*(async_setup_component(hass, domain, config) for domain in domains_to_load)
|
||||
)
|
||||
|
||||
last_load = domains_to_load
|
||||
stage_2_domains -= domains_to_load
|
||||
@@ -412,10 +373,9 @@ async def _async_set_up_integrations(
|
||||
if stage_2_domains:
|
||||
_LOGGER.debug("Final set up: %s", stage_2_domains)
|
||||
|
||||
await asyncio.gather(*[
|
||||
async_setup_component(hass, domain, config)
|
||||
for domain in stage_2_domains
|
||||
])
|
||||
await asyncio.gather(
|
||||
*(async_setup_component(hass, domain, config) for domain in stage_2_domains)
|
||||
)
|
||||
|
||||
# Wrap up startup
|
||||
await hass.async_block_till_done()
|
||||
|
||||
@@ -31,12 +31,11 @@ def is_on(hass, entity_id=None):
|
||||
component = getattr(hass.components, domain)
|
||||
|
||||
except ImportError:
|
||||
_LOGGER.error('Failed to call %s.is_on: component not found',
|
||||
domain)
|
||||
_LOGGER.error("Failed to call %s.is_on: component not found", domain)
|
||||
continue
|
||||
|
||||
if not hasattr(component, 'is_on'):
|
||||
_LOGGER.warning("Component %s has no is_on method.", domain)
|
||||
if not hasattr(component, "is_on"):
|
||||
_LOGGER.warning("Integration %s has no is_on method.", domain)
|
||||
continue
|
||||
|
||||
if component.is_on(ent_id):
|
||||
|
||||
@@ -6,9 +6,18 @@ from requests.exceptions import HTTPError, ConnectTimeout
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
ATTR_ATTRIBUTION, ATTR_DATE, ATTR_TIME, ATTR_ENTITY_ID, CONF_USERNAME,
|
||||
CONF_PASSWORD, CONF_EXCLUDE, CONF_NAME, CONF_LIGHTS,
|
||||
EVENT_HOMEASSISTANT_STOP, EVENT_HOMEASSISTANT_START)
|
||||
ATTR_ATTRIBUTION,
|
||||
ATTR_DATE,
|
||||
ATTR_TIME,
|
||||
ATTR_ENTITY_ID,
|
||||
CONF_USERNAME,
|
||||
CONF_PASSWORD,
|
||||
CONF_EXCLUDE,
|
||||
CONF_NAME,
|
||||
CONF_LIGHTS,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
EVENT_HOMEASSISTANT_START,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers import discovery
|
||||
from homeassistant.helpers.entity import Entity
|
||||
@@ -17,77 +26,88 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTRIBUTION = "Data provided by goabode.com"
|
||||
|
||||
CONF_POLLING = 'polling'
|
||||
CONF_POLLING = "polling"
|
||||
|
||||
DOMAIN = 'abode'
|
||||
DEFAULT_CACHEDB = './abodepy_cache.pickle'
|
||||
DOMAIN = "abode"
|
||||
DEFAULT_CACHEDB = "./abodepy_cache.pickle"
|
||||
|
||||
NOTIFICATION_ID = 'abode_notification'
|
||||
NOTIFICATION_TITLE = 'Abode Security Setup'
|
||||
NOTIFICATION_ID = "abode_notification"
|
||||
NOTIFICATION_TITLE = "Abode Security Setup"
|
||||
|
||||
EVENT_ABODE_ALARM = 'abode_alarm'
|
||||
EVENT_ABODE_ALARM_END = 'abode_alarm_end'
|
||||
EVENT_ABODE_AUTOMATION = 'abode_automation'
|
||||
EVENT_ABODE_FAULT = 'abode_panel_fault'
|
||||
EVENT_ABODE_RESTORE = 'abode_panel_restore'
|
||||
EVENT_ABODE_ALARM = "abode_alarm"
|
||||
EVENT_ABODE_ALARM_END = "abode_alarm_end"
|
||||
EVENT_ABODE_AUTOMATION = "abode_automation"
|
||||
EVENT_ABODE_FAULT = "abode_panel_fault"
|
||||
EVENT_ABODE_RESTORE = "abode_panel_restore"
|
||||
|
||||
SERVICE_SETTINGS = 'change_setting'
|
||||
SERVICE_CAPTURE_IMAGE = 'capture_image'
|
||||
SERVICE_TRIGGER = 'trigger_quick_action'
|
||||
SERVICE_SETTINGS = "change_setting"
|
||||
SERVICE_CAPTURE_IMAGE = "capture_image"
|
||||
SERVICE_TRIGGER = "trigger_quick_action"
|
||||
|
||||
ATTR_DEVICE_ID = 'device_id'
|
||||
ATTR_DEVICE_NAME = 'device_name'
|
||||
ATTR_DEVICE_TYPE = 'device_type'
|
||||
ATTR_EVENT_CODE = 'event_code'
|
||||
ATTR_EVENT_NAME = 'event_name'
|
||||
ATTR_EVENT_TYPE = 'event_type'
|
||||
ATTR_EVENT_UTC = 'event_utc'
|
||||
ATTR_SETTING = 'setting'
|
||||
ATTR_USER_NAME = 'user_name'
|
||||
ATTR_VALUE = 'value'
|
||||
ATTR_DEVICE_ID = "device_id"
|
||||
ATTR_DEVICE_NAME = "device_name"
|
||||
ATTR_DEVICE_TYPE = "device_type"
|
||||
ATTR_EVENT_CODE = "event_code"
|
||||
ATTR_EVENT_NAME = "event_name"
|
||||
ATTR_EVENT_TYPE = "event_type"
|
||||
ATTR_EVENT_UTC = "event_utc"
|
||||
ATTR_SETTING = "setting"
|
||||
ATTR_USER_NAME = "user_name"
|
||||
ATTR_VALUE = "value"
|
||||
|
||||
ABODE_DEVICE_ID_LIST_SCHEMA = vol.Schema([str])
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_POLLING, default=False): cv.boolean,
|
||||
vol.Optional(CONF_EXCLUDE, default=[]): ABODE_DEVICE_ID_LIST_SCHEMA,
|
||||
vol.Optional(CONF_LIGHTS, default=[]): ABODE_DEVICE_ID_LIST_SCHEMA
|
||||
}),
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
vol.Optional(CONF_POLLING, default=False): cv.boolean,
|
||||
vol.Optional(CONF_EXCLUDE, default=[]): ABODE_DEVICE_ID_LIST_SCHEMA,
|
||||
vol.Optional(CONF_LIGHTS, default=[]): ABODE_DEVICE_ID_LIST_SCHEMA,
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
CHANGE_SETTING_SCHEMA = vol.Schema({
|
||||
vol.Required(ATTR_SETTING): cv.string,
|
||||
vol.Required(ATTR_VALUE): cv.string
|
||||
})
|
||||
CHANGE_SETTING_SCHEMA = vol.Schema(
|
||||
{vol.Required(ATTR_SETTING): cv.string, vol.Required(ATTR_VALUE): cv.string}
|
||||
)
|
||||
|
||||
CAPTURE_IMAGE_SCHEMA = vol.Schema({
|
||||
ATTR_ENTITY_ID: cv.entity_ids,
|
||||
})
|
||||
CAPTURE_IMAGE_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})
|
||||
|
||||
TRIGGER_SCHEMA = vol.Schema({
|
||||
ATTR_ENTITY_ID: cv.entity_ids,
|
||||
})
|
||||
TRIGGER_SCHEMA = vol.Schema({ATTR_ENTITY_ID: cv.entity_ids})
|
||||
|
||||
ABODE_PLATFORMS = [
|
||||
'alarm_control_panel', 'binary_sensor', 'lock', 'switch', 'cover',
|
||||
'camera', 'light', 'sensor'
|
||||
"alarm_control_panel",
|
||||
"binary_sensor",
|
||||
"lock",
|
||||
"switch",
|
||||
"cover",
|
||||
"camera",
|
||||
"light",
|
||||
"sensor",
|
||||
]
|
||||
|
||||
|
||||
class AbodeSystem:
|
||||
"""Abode System class."""
|
||||
|
||||
def __init__(self, username, password, cache,
|
||||
name, polling, exclude, lights):
|
||||
def __init__(self, username, password, cache, name, polling, exclude, lights):
|
||||
"""Initialize the system."""
|
||||
import abodepy
|
||||
|
||||
self.abode = abodepy.Abode(
|
||||
username, password, auto_login=True, get_devices=True,
|
||||
get_automations=True, cache_path=cache)
|
||||
username,
|
||||
password,
|
||||
auto_login=True,
|
||||
get_devices=True,
|
||||
get_automations=True,
|
||||
cache_path=cache,
|
||||
)
|
||||
self.name = name
|
||||
self.polling = polling
|
||||
self.exclude = exclude
|
||||
@@ -106,9 +126,9 @@ class AbodeSystem:
|
||||
"""Check if a switch device is configured as a light."""
|
||||
import abodepy.helpers.constants as CONST
|
||||
|
||||
return (device.generic_type == CONST.TYPE_LIGHT or
|
||||
(device.generic_type == CONST.TYPE_SWITCH and
|
||||
device.device_id in self.lights))
|
||||
return device.generic_type == CONST.TYPE_LIGHT or (
|
||||
device.generic_type == CONST.TYPE_SWITCH and device.device_id in self.lights
|
||||
)
|
||||
|
||||
|
||||
def setup(hass, config):
|
||||
@@ -126,16 +146,18 @@ def setup(hass, config):
|
||||
try:
|
||||
cache = hass.config.path(DEFAULT_CACHEDB)
|
||||
hass.data[DOMAIN] = AbodeSystem(
|
||||
username, password, cache, name, polling, exclude, lights)
|
||||
username, password, cache, name, polling, exclude, lights
|
||||
)
|
||||
except (AbodeException, ConnectTimeout, HTTPError) as ex:
|
||||
_LOGGER.error("Unable to connect to Abode: %s", str(ex))
|
||||
|
||||
hass.components.persistent_notification.create(
|
||||
'Error: {}<br />'
|
||||
'You will need to restart hass after fixing.'
|
||||
''.format(ex),
|
||||
"Error: {}<br />"
|
||||
"You will need to restart hass after fixing."
|
||||
"".format(ex),
|
||||
title=NOTIFICATION_TITLE,
|
||||
notification_id=NOTIFICATION_ID)
|
||||
notification_id=NOTIFICATION_ID,
|
||||
)
|
||||
return False
|
||||
|
||||
setup_hass_services(hass)
|
||||
@@ -166,8 +188,11 @@ def setup_hass_services(hass):
|
||||
"""Capture a new image."""
|
||||
entity_ids = call.data.get(ATTR_ENTITY_ID)
|
||||
|
||||
target_devices = [device for device in hass.data[DOMAIN].devices
|
||||
if device.entity_id in entity_ids]
|
||||
target_devices = [
|
||||
device
|
||||
for device in hass.data[DOMAIN].devices
|
||||
if device.entity_id in entity_ids
|
||||
]
|
||||
|
||||
for device in target_devices:
|
||||
device.capture()
|
||||
@@ -176,27 +201,31 @@ def setup_hass_services(hass):
|
||||
"""Trigger a quick action."""
|
||||
entity_ids = call.data.get(ATTR_ENTITY_ID, None)
|
||||
|
||||
target_devices = [device for device in hass.data[DOMAIN].devices
|
||||
if device.entity_id in entity_ids]
|
||||
target_devices = [
|
||||
device
|
||||
for device in hass.data[DOMAIN].devices
|
||||
if device.entity_id in entity_ids
|
||||
]
|
||||
|
||||
for device in target_devices:
|
||||
device.trigger()
|
||||
|
||||
hass.services.register(
|
||||
DOMAIN, SERVICE_SETTINGS, change_setting,
|
||||
schema=CHANGE_SETTING_SCHEMA)
|
||||
DOMAIN, SERVICE_SETTINGS, change_setting, schema=CHANGE_SETTING_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.register(
|
||||
DOMAIN, SERVICE_CAPTURE_IMAGE, capture_image,
|
||||
schema=CAPTURE_IMAGE_SCHEMA)
|
||||
DOMAIN, SERVICE_CAPTURE_IMAGE, capture_image, schema=CAPTURE_IMAGE_SCHEMA
|
||||
)
|
||||
|
||||
hass.services.register(
|
||||
DOMAIN, SERVICE_TRIGGER, trigger_quick_action,
|
||||
schema=TRIGGER_SCHEMA)
|
||||
DOMAIN, SERVICE_TRIGGER, trigger_quick_action, schema=TRIGGER_SCHEMA
|
||||
)
|
||||
|
||||
|
||||
def setup_hass_events(hass):
|
||||
"""Home Assistant start and stop callbacks."""
|
||||
|
||||
def startup(event):
|
||||
"""Listen for push events."""
|
||||
hass.data[DOMAIN].abode.events.start()
|
||||
@@ -222,28 +251,32 @@ def setup_abode_events(hass):
|
||||
def event_callback(event, event_json):
|
||||
"""Handle an event callback from Abode."""
|
||||
data = {
|
||||
ATTR_DEVICE_ID: event_json.get(ATTR_DEVICE_ID, ''),
|
||||
ATTR_DEVICE_NAME: event_json.get(ATTR_DEVICE_NAME, ''),
|
||||
ATTR_DEVICE_TYPE: event_json.get(ATTR_DEVICE_TYPE, ''),
|
||||
ATTR_EVENT_CODE: event_json.get(ATTR_EVENT_CODE, ''),
|
||||
ATTR_EVENT_NAME: event_json.get(ATTR_EVENT_NAME, ''),
|
||||
ATTR_EVENT_TYPE: event_json.get(ATTR_EVENT_TYPE, ''),
|
||||
ATTR_EVENT_UTC: event_json.get(ATTR_EVENT_UTC, ''),
|
||||
ATTR_USER_NAME: event_json.get(ATTR_USER_NAME, ''),
|
||||
ATTR_DATE: event_json.get(ATTR_DATE, ''),
|
||||
ATTR_TIME: event_json.get(ATTR_TIME, ''),
|
||||
ATTR_DEVICE_ID: event_json.get(ATTR_DEVICE_ID, ""),
|
||||
ATTR_DEVICE_NAME: event_json.get(ATTR_DEVICE_NAME, ""),
|
||||
ATTR_DEVICE_TYPE: event_json.get(ATTR_DEVICE_TYPE, ""),
|
||||
ATTR_EVENT_CODE: event_json.get(ATTR_EVENT_CODE, ""),
|
||||
ATTR_EVENT_NAME: event_json.get(ATTR_EVENT_NAME, ""),
|
||||
ATTR_EVENT_TYPE: event_json.get(ATTR_EVENT_TYPE, ""),
|
||||
ATTR_EVENT_UTC: event_json.get(ATTR_EVENT_UTC, ""),
|
||||
ATTR_USER_NAME: event_json.get(ATTR_USER_NAME, ""),
|
||||
ATTR_DATE: event_json.get(ATTR_DATE, ""),
|
||||
ATTR_TIME: event_json.get(ATTR_TIME, ""),
|
||||
}
|
||||
|
||||
hass.bus.fire(event, data)
|
||||
|
||||
events = [TIMELINE.ALARM_GROUP, TIMELINE.ALARM_END_GROUP,
|
||||
TIMELINE.PANEL_FAULT_GROUP, TIMELINE.PANEL_RESTORE_GROUP,
|
||||
TIMELINE.AUTOMATION_GROUP]
|
||||
events = [
|
||||
TIMELINE.ALARM_GROUP,
|
||||
TIMELINE.ALARM_END_GROUP,
|
||||
TIMELINE.PANEL_FAULT_GROUP,
|
||||
TIMELINE.PANEL_RESTORE_GROUP,
|
||||
TIMELINE.AUTOMATION_GROUP,
|
||||
]
|
||||
|
||||
for event in events:
|
||||
hass.data[DOMAIN].abode.events.add_event_callback(
|
||||
event,
|
||||
partial(event_callback, event))
|
||||
event, partial(event_callback, event)
|
||||
)
|
||||
|
||||
|
||||
class AbodeDevice(Entity):
|
||||
@@ -258,7 +291,8 @@ class AbodeDevice(Entity):
|
||||
"""Subscribe Abode events."""
|
||||
self.hass.async_add_job(
|
||||
self._data.abode.events.add_device_callback,
|
||||
self._device.device_id, self._update_callback
|
||||
self._device.device_id,
|
||||
self._update_callback,
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -280,10 +314,10 @@ class AbodeDevice(Entity):
|
||||
"""Return the state attributes."""
|
||||
return {
|
||||
ATTR_ATTRIBUTION: ATTRIBUTION,
|
||||
'device_id': self._device.device_id,
|
||||
'battery_low': self._device.battery_low,
|
||||
'no_response': self._device.no_response,
|
||||
'device_type': self._device.type
|
||||
"device_id": self._device.device_id,
|
||||
"battery_low": self._device.battery_low,
|
||||
"no_response": self._device.no_response,
|
||||
"device_type": self._device.type,
|
||||
}
|
||||
|
||||
def _update_callback(self, device):
|
||||
@@ -305,7 +339,8 @@ class AbodeAutomation(Entity):
|
||||
if self._event:
|
||||
self.hass.async_add_job(
|
||||
self._data.abode.events.add_event_callback,
|
||||
self._event, self._update_callback
|
||||
self._event,
|
||||
self._update_callback,
|
||||
)
|
||||
|
||||
@property
|
||||
@@ -327,9 +362,9 @@ class AbodeAutomation(Entity):
|
||||
"""Return the state attributes."""
|
||||
return {
|
||||
ATTR_ATTRIBUTION: ATTRIBUTION,
|
||||
'automation_id': self._automation.automation_id,
|
||||
'type': self._automation.type,
|
||||
'sub_type': self._automation.sub_type
|
||||
"automation_id": self._automation.automation_id,
|
||||
"type": self._automation.type,
|
||||
"sub_type": self._automation.sub_type,
|
||||
}
|
||||
|
||||
def _update_callback(self, device):
|
||||
|
||||
@@ -3,14 +3,17 @@ import logging
|
||||
|
||||
import homeassistant.components.alarm_control_panel as alarm
|
||||
from homeassistant.const import (
|
||||
ATTR_ATTRIBUTION, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME,
|
||||
STATE_ALARM_DISARMED)
|
||||
ATTR_ATTRIBUTION,
|
||||
STATE_ALARM_ARMED_AWAY,
|
||||
STATE_ALARM_ARMED_HOME,
|
||||
STATE_ALARM_DISARMED,
|
||||
)
|
||||
|
||||
from . import ATTRIBUTION, DOMAIN as ABODE_DOMAIN, AbodeDevice
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ICON = 'mdi:security'
|
||||
ICON = "mdi:security"
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
@@ -72,7 +75,7 @@ class AbodeAlarm(AbodeDevice, alarm.AlarmControlPanel):
|
||||
"""Return the state attributes."""
|
||||
return {
|
||||
ATTR_ATTRIBUTION: ATTRIBUTION,
|
||||
'device_id': self._device.device_id,
|
||||
'battery_backup': self._device.battery,
|
||||
'cellular_backup': self._device.is_cellular,
|
||||
"device_id": self._device.device_id,
|
||||
"battery_backup": self._device.battery,
|
||||
"cellular_backup": self._device.is_cellular,
|
||||
}
|
||||
|
||||
@@ -15,9 +15,13 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
|
||||
data = hass.data[ABODE_DOMAIN]
|
||||
|
||||
device_types = [CONST.TYPE_CONNECTIVITY, CONST.TYPE_MOISTURE,
|
||||
CONST.TYPE_MOTION, CONST.TYPE_OCCUPANCY,
|
||||
CONST.TYPE_OPENING]
|
||||
device_types = [
|
||||
CONST.TYPE_CONNECTIVITY,
|
||||
CONST.TYPE_MOISTURE,
|
||||
CONST.TYPE_MOTION,
|
||||
CONST.TYPE_OCCUPANCY,
|
||||
CONST.TYPE_OPENING,
|
||||
]
|
||||
|
||||
devices = []
|
||||
for device in data.abode.get_devices(generic_type=device_types):
|
||||
@@ -26,13 +30,15 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
|
||||
devices.append(AbodeBinarySensor(data, device))
|
||||
|
||||
for automation in data.abode.get_automations(
|
||||
generic_type=CONST.TYPE_QUICK_ACTION):
|
||||
for automation in data.abode.get_automations(generic_type=CONST.TYPE_QUICK_ACTION):
|
||||
if data.is_automation_excluded(automation):
|
||||
continue
|
||||
|
||||
devices.append(AbodeQuickActionBinarySensor(
|
||||
data, automation, TIMELINE.AUTOMATION_EDIT_GROUP))
|
||||
devices.append(
|
||||
AbodeQuickActionBinarySensor(
|
||||
data, automation, TIMELINE.AUTOMATION_EDIT_GROUP
|
||||
)
|
||||
)
|
||||
|
||||
data.devices.extend(devices)
|
||||
|
||||
|
||||
@@ -49,7 +49,8 @@ class AbodeCamera(AbodeDevice, Camera):
|
||||
|
||||
self.hass.async_add_job(
|
||||
self._data.abode.events.add_timeline_callback,
|
||||
self._event, self._capture_callback
|
||||
self._event,
|
||||
self._capture_callback,
|
||||
)
|
||||
|
||||
def capture(self):
|
||||
@@ -66,8 +67,7 @@ class AbodeCamera(AbodeDevice, Camera):
|
||||
"""Attempt to download the most recent capture."""
|
||||
if self._device.image_url:
|
||||
try:
|
||||
self._response = requests.get(
|
||||
self._device.image_url, stream=True)
|
||||
self._response = requests.get(self._device.image_url, stream=True)
|
||||
|
||||
self._response.raise_for_status()
|
||||
except requests.HTTPError as err:
|
||||
|
||||
@@ -3,10 +3,18 @@ import logging
|
||||
from math import ceil
|
||||
|
||||
from homeassistant.components.light import (
|
||||
ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_HS_COLOR, SUPPORT_BRIGHTNESS,
|
||||
SUPPORT_COLOR, SUPPORT_COLOR_TEMP, Light)
|
||||
ATTR_BRIGHTNESS,
|
||||
ATTR_COLOR_TEMP,
|
||||
ATTR_HS_COLOR,
|
||||
SUPPORT_BRIGHTNESS,
|
||||
SUPPORT_COLOR,
|
||||
SUPPORT_COLOR_TEMP,
|
||||
Light,
|
||||
)
|
||||
from homeassistant.util.color import (
|
||||
color_temperature_kelvin_to_mired, color_temperature_mired_to_kelvin)
|
||||
color_temperature_kelvin_to_mired,
|
||||
color_temperature_mired_to_kelvin,
|
||||
)
|
||||
|
||||
from . import DOMAIN as ABODE_DOMAIN, AbodeDevice
|
||||
|
||||
@@ -42,8 +50,8 @@ class AbodeLight(AbodeDevice, Light):
|
||||
"""Turn on the light."""
|
||||
if ATTR_COLOR_TEMP in kwargs and self._device.is_color_capable:
|
||||
self._device.set_color_temp(
|
||||
int(color_temperature_mired_to_kelvin(
|
||||
kwargs[ATTR_COLOR_TEMP])))
|
||||
int(color_temperature_mired_to_kelvin(kwargs[ATTR_COLOR_TEMP]))
|
||||
)
|
||||
|
||||
if ATTR_HS_COLOR in kwargs and self._device.is_color_capable:
|
||||
self._device.set_color(kwargs[ATTR_HS_COLOR])
|
||||
|
||||
@@ -2,7 +2,10 @@
|
||||
import logging
|
||||
|
||||
from homeassistant.const import (
|
||||
DEVICE_CLASS_HUMIDITY, DEVICE_CLASS_ILLUMINANCE, DEVICE_CLASS_TEMPERATURE)
|
||||
DEVICE_CLASS_HUMIDITY,
|
||||
DEVICE_CLASS_ILLUMINANCE,
|
||||
DEVICE_CLASS_TEMPERATURE,
|
||||
)
|
||||
|
||||
from . import DOMAIN as ABODE_DOMAIN, AbodeDevice
|
||||
|
||||
@@ -10,9 +13,9 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Sensor types: Name, icon
|
||||
SENSOR_TYPES = {
|
||||
'temp': ['Temperature', DEVICE_CLASS_TEMPERATURE],
|
||||
'humidity': ['Humidity', DEVICE_CLASS_HUMIDITY],
|
||||
'lux': ['Lux', DEVICE_CLASS_ILLUMINANCE],
|
||||
"temp": ["Temperature", DEVICE_CLASS_TEMPERATURE],
|
||||
"humidity": ["Humidity", DEVICE_CLASS_HUMIDITY],
|
||||
"lux": ["Lux", DEVICE_CLASS_ILLUMINANCE],
|
||||
}
|
||||
|
||||
|
||||
@@ -42,8 +45,9 @@ class AbodeSensor(AbodeDevice):
|
||||
"""Initialize a sensor for an Abode device."""
|
||||
super().__init__(data, device)
|
||||
self._sensor_type = sensor_type
|
||||
self._name = '{0} {1}'.format(
|
||||
self._device.name, SENSOR_TYPES[self._sensor_type][0])
|
||||
self._name = "{0} {1}".format(
|
||||
self._device.name, SENSOR_TYPES[self._sensor_type][0]
|
||||
)
|
||||
self._device_class = SENSOR_TYPES[self._sensor_type][1]
|
||||
|
||||
@property
|
||||
@@ -59,19 +63,19 @@ class AbodeSensor(AbodeDevice):
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state of the sensor."""
|
||||
if self._sensor_type == 'temp':
|
||||
if self._sensor_type == "temp":
|
||||
return self._device.temp
|
||||
if self._sensor_type == 'humidity':
|
||||
if self._sensor_type == "humidity":
|
||||
return self._device.humidity
|
||||
if self._sensor_type == 'lux':
|
||||
if self._sensor_type == "lux":
|
||||
return self._device.lux
|
||||
|
||||
@property
|
||||
def unit_of_measurement(self):
|
||||
"""Return the units of measurement."""
|
||||
if self._sensor_type == 'temp':
|
||||
if self._sensor_type == "temp":
|
||||
return self._device.temp_unit
|
||||
if self._sensor_type == 'humidity':
|
||||
if self._sensor_type == "humidity":
|
||||
return self._device.humidity_unit
|
||||
if self._sensor_type == 'lux':
|
||||
if self._sensor_type == "lux":
|
||||
return self._device.lux_unit
|
||||
|
||||
@@ -25,13 +25,13 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
devices.append(AbodeSwitch(data, device))
|
||||
|
||||
# Get all Abode automations that can be enabled/disabled
|
||||
for automation in data.abode.get_automations(
|
||||
generic_type=CONST.TYPE_AUTOMATION):
|
||||
for automation in data.abode.get_automations(generic_type=CONST.TYPE_AUTOMATION):
|
||||
if data.is_automation_excluded(automation):
|
||||
continue
|
||||
|
||||
devices.append(AbodeAutomationSwitch(
|
||||
data, automation, TIMELINE.AUTOMATION_EDIT_GROUP))
|
||||
devices.append(
|
||||
AbodeAutomationSwitch(data, automation, TIMELINE.AUTOMATION_EDIT_GROUP)
|
||||
)
|
||||
|
||||
data.devices.extend(devices)
|
||||
|
||||
|
||||
@@ -4,50 +4,58 @@ import re
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.switch import (SwitchDevice, PLATFORM_SCHEMA)
|
||||
from homeassistant.components.switch import SwitchDevice, PLATFORM_SCHEMA
|
||||
from homeassistant.const import (
|
||||
STATE_ON, STATE_OFF, STATE_UNKNOWN, CONF_NAME, CONF_FILENAME)
|
||||
STATE_ON,
|
||||
STATE_OFF,
|
||||
STATE_UNKNOWN,
|
||||
CONF_NAME,
|
||||
CONF_FILENAME,
|
||||
)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONF_TIMEOUT = 'timeout'
|
||||
CONF_WRITE_TIMEOUT = 'write_timeout'
|
||||
CONF_TIMEOUT = "timeout"
|
||||
CONF_WRITE_TIMEOUT = "write_timeout"
|
||||
|
||||
DEFAULT_NAME = 'Acer Projector'
|
||||
DEFAULT_NAME = "Acer Projector"
|
||||
DEFAULT_TIMEOUT = 1
|
||||
DEFAULT_WRITE_TIMEOUT = 1
|
||||
|
||||
ECO_MODE = 'ECO Mode'
|
||||
ECO_MODE = "ECO Mode"
|
||||
|
||||
ICON = 'mdi:projector'
|
||||
ICON = "mdi:projector"
|
||||
|
||||
INPUT_SOURCE = 'Input Source'
|
||||
INPUT_SOURCE = "Input Source"
|
||||
|
||||
LAMP = 'Lamp'
|
||||
LAMP_HOURS = 'Lamp Hours'
|
||||
LAMP = "Lamp"
|
||||
LAMP_HOURS = "Lamp Hours"
|
||||
|
||||
MODEL = 'Model'
|
||||
MODEL = "Model"
|
||||
|
||||
# Commands known to the projector
|
||||
CMD_DICT = {
|
||||
LAMP: '* 0 Lamp ?\r',
|
||||
LAMP_HOURS: '* 0 Lamp\r',
|
||||
INPUT_SOURCE: '* 0 Src ?\r',
|
||||
ECO_MODE: '* 0 IR 052\r',
|
||||
MODEL: '* 0 IR 035\r',
|
||||
STATE_ON: '* 0 IR 001\r',
|
||||
STATE_OFF: '* 0 IR 002\r',
|
||||
LAMP: "* 0 Lamp ?\r",
|
||||
LAMP_HOURS: "* 0 Lamp\r",
|
||||
INPUT_SOURCE: "* 0 Src ?\r",
|
||||
ECO_MODE: "* 0 IR 052\r",
|
||||
MODEL: "* 0 IR 035\r",
|
||||
STATE_ON: "* 0 IR 001\r",
|
||||
STATE_OFF: "* 0 IR 002\r",
|
||||
}
|
||||
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_FILENAME): cv.isdevice,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
|
||||
vol.Optional(CONF_WRITE_TIMEOUT, default=DEFAULT_WRITE_TIMEOUT):
|
||||
cv.positive_int,
|
||||
})
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_FILENAME): cv.isdevice,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
|
||||
vol.Optional(
|
||||
CONF_WRITE_TIMEOUT, default=DEFAULT_WRITE_TIMEOUT
|
||||
): cv.positive_int,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
@@ -66,9 +74,10 @@ class AcerSwitch(SwitchDevice):
|
||||
def __init__(self, serial_port, name, timeout, write_timeout, **kwargs):
|
||||
"""Init of the Acer projector."""
|
||||
import serial
|
||||
|
||||
self.ser = serial.Serial(
|
||||
port=serial_port, timeout=timeout, write_timeout=write_timeout,
|
||||
**kwargs)
|
||||
port=serial_port, timeout=timeout, write_timeout=write_timeout, **kwargs
|
||||
)
|
||||
self._serial_port = serial_port
|
||||
self._name = name
|
||||
self._state = False
|
||||
@@ -82,6 +91,7 @@ class AcerSwitch(SwitchDevice):
|
||||
def _write_read(self, msg):
|
||||
"""Write to the projector and read the return."""
|
||||
import serial
|
||||
|
||||
ret = ""
|
||||
# Sometimes the projector won't answer for no reason or the projector
|
||||
# was disconnected during runtime.
|
||||
@@ -89,14 +99,14 @@ class AcerSwitch(SwitchDevice):
|
||||
try:
|
||||
if not self.ser.is_open:
|
||||
self.ser.open()
|
||||
msg = msg.encode('utf-8')
|
||||
msg = msg.encode("utf-8")
|
||||
self.ser.write(msg)
|
||||
# Size is an experience value there is no real limit.
|
||||
# AFAIK there is no limit and no end character so we will usually
|
||||
# need to wait for timeout
|
||||
ret = self.ser.read_until(size=20).decode('utf-8')
|
||||
ret = self.ser.read_until(size=20).decode("utf-8")
|
||||
except serial.SerialException:
|
||||
_LOGGER.error('Problem communicating with %s', self._serial_port)
|
||||
_LOGGER.error("Problem communicating with %s", self._serial_port)
|
||||
self.ser.close()
|
||||
return ret
|
||||
|
||||
@@ -104,7 +114,7 @@ class AcerSwitch(SwitchDevice):
|
||||
"""Write msg, obtain answer and format output."""
|
||||
# answers are formatted as ***\answer\r***
|
||||
awns = self._write_read(msg)
|
||||
match = re.search(r'\r(.+)\r', awns)
|
||||
match = re.search(r"\r(.+)\r", awns)
|
||||
if match:
|
||||
return match.group(1)
|
||||
return STATE_UNKNOWN
|
||||
@@ -133,10 +143,10 @@ class AcerSwitch(SwitchDevice):
|
||||
"""Get the latest state from the projector."""
|
||||
msg = CMD_DICT[LAMP]
|
||||
awns = self._write_read_format(msg)
|
||||
if awns == 'Lamp 1':
|
||||
if awns == "Lamp 1":
|
||||
self._state = True
|
||||
self._available = True
|
||||
elif awns == 'Lamp 0':
|
||||
elif awns == "Lamp 0":
|
||||
self._state = False
|
||||
self._available = True
|
||||
else:
|
||||
|
||||
@@ -8,22 +8,28 @@ import voluptuous as vol
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
import homeassistant.util.dt as dt_util
|
||||
from homeassistant.components.device_tracker import (
|
||||
DOMAIN, PLATFORM_SCHEMA, DeviceScanner)
|
||||
DOMAIN,
|
||||
PLATFORM_SCHEMA,
|
||||
DeviceScanner,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_LEASES_REGEX = re.compile(
|
||||
r'(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})' +
|
||||
r'\smac:\s(?P<mac>([0-9a-f]{2}[:-]){5}([0-9a-f]{2}))' +
|
||||
r'\svalid\sfor:\s(?P<timevalid>(-?\d+))' +
|
||||
r'\ssec')
|
||||
r"(?P<ip>([0-9]{1,3}[\.]){3}[0-9]{1,3})"
|
||||
+ r"\smac:\s(?P<mac>([0-9a-f]{2}[:-]){5}([0-9a-f]{2}))"
|
||||
+ r"\svalid\sfor:\s(?P<timevalid>(-?\d+))"
|
||||
+ r"\ssec"
|
||||
)
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Required(CONF_USERNAME): cv.string
|
||||
})
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_HOST): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def get_scanner(hass, config):
|
||||
@@ -32,7 +38,7 @@ def get_scanner(hass, config):
|
||||
return scanner if scanner.success_init else None
|
||||
|
||||
|
||||
Device = namedtuple('Device', ['mac', 'ip', 'last_update'])
|
||||
Device = namedtuple("Device", ["mac", "ip", "last_update"])
|
||||
|
||||
|
||||
class ActiontecDeviceScanner(DeviceScanner):
|
||||
@@ -75,9 +81,11 @@ class ActiontecDeviceScanner(DeviceScanner):
|
||||
actiontec_data = self.get_actiontec_data()
|
||||
if not actiontec_data:
|
||||
return False
|
||||
self.last_results = [Device(data['mac'], name, now)
|
||||
for name, data in actiontec_data.items()
|
||||
if data['timevalid'] > -60]
|
||||
self.last_results = [
|
||||
Device(data["mac"], name, now)
|
||||
for name, data in actiontec_data.items()
|
||||
if data["timevalid"] > -60
|
||||
]
|
||||
_LOGGER.info("Scan successful")
|
||||
return True
|
||||
|
||||
@@ -85,17 +93,16 @@ class ActiontecDeviceScanner(DeviceScanner):
|
||||
"""Retrieve data from Actiontec MI424WR and return parsed result."""
|
||||
try:
|
||||
telnet = telnetlib.Telnet(self.host)
|
||||
telnet.read_until(b'Username: ')
|
||||
telnet.write((self.username + '\n').encode('ascii'))
|
||||
telnet.read_until(b'Password: ')
|
||||
telnet.write((self.password + '\n').encode('ascii'))
|
||||
prompt = telnet.read_until(
|
||||
b'Wireless Broadband Router> ').split(b'\n')[-1]
|
||||
telnet.write('firewall mac_cache_dump\n'.encode('ascii'))
|
||||
telnet.write('\n'.encode('ascii'))
|
||||
telnet.read_until(b"Username: ")
|
||||
telnet.write((self.username + "\n").encode("ascii"))
|
||||
telnet.read_until(b"Password: ")
|
||||
telnet.write((self.password + "\n").encode("ascii"))
|
||||
prompt = telnet.read_until(b"Wireless Broadband Router> ").split(b"\n")[-1]
|
||||
telnet.write("firewall mac_cache_dump\n".encode("ascii"))
|
||||
telnet.write("\n".encode("ascii"))
|
||||
telnet.read_until(prompt)
|
||||
leases_result = telnet.read_until(prompt).split(b'\n')[1:-1]
|
||||
telnet.write('exit\n'.encode('ascii'))
|
||||
leases_result = telnet.read_until(prompt).split(b"\n")[1:-1]
|
||||
telnet.write("exit\n".encode("ascii"))
|
||||
except EOFError:
|
||||
_LOGGER.exception("Unexpected response from router")
|
||||
return
|
||||
@@ -105,11 +112,11 @@ class ActiontecDeviceScanner(DeviceScanner):
|
||||
|
||||
devices = {}
|
||||
for lease in leases_result:
|
||||
match = _LEASES_REGEX.search(lease.decode('utf-8'))
|
||||
match = _LEASES_REGEX.search(lease.decode("utf-8"))
|
||||
if match is not None:
|
||||
devices[match.group('ip')] = {
|
||||
'ip': match.group('ip'),
|
||||
'mac': match.group('mac').upper(),
|
||||
'timevalid': int(match.group('timevalid'))
|
||||
}
|
||||
devices[match.group("ip")] = {
|
||||
"ip": match.group("ip"),
|
||||
"mac": match.group("mac").upper(),
|
||||
"timevalid": int(match.group("timevalid")),
|
||||
}
|
||||
return devices
|
||||
|
||||
30
homeassistant/components/adguard/.translations/bg.json
Normal file
30
homeassistant/components/adguard/.translations/bg.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "\u0410\u043a\u0442\u0443\u0430\u043b\u0438\u0437\u0438\u0440\u0430\u043d\u0435 \u043d\u0430 \u0441\u044a\u0449\u0435\u0441\u0442\u0432\u0443\u0432\u0430\u0449\u0430\u0442\u0430 \u043a\u043e\u043d\u0444\u0438\u0433\u0443\u0440\u0430\u0446\u0438\u044f.",
|
||||
"single_instance_allowed": "\u0420\u0430\u0437\u0440\u0435\u0448\u0435\u043d\u0430 \u0435 \u0441\u0430\u043c\u043e \u0435\u0434\u043d\u0430 \u043a\u043e\u043d\u0444\u0438\u0433\u0443\u0440\u0430\u0446\u0438\u044f \u043d\u0430 AdGuard Home."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "\u041d\u0435\u0443\u0441\u043f\u0435\u0448\u043d\u043e \u0441\u0432\u044a\u0440\u0437\u0432\u0430\u043d\u0435."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "\u0418\u0441\u043a\u0430\u0442\u0435 \u043b\u0438 \u0434\u0430 \u043a\u043e\u043d\u0444\u0438\u0433\u0443\u0440\u0438\u0440\u0430\u0442\u0435 Home Assistant \u0434\u0430 \u0441\u0435 \u0441\u0432\u044a\u0440\u0437\u0432\u0430 \u0441 AdGuard Home, \u043f\u0440\u0435\u0434\u043e\u0441\u0442\u0430\u0432\u0435\u043d \u043e\u0442 Hass.io \u0434\u043e\u0431\u0430\u0432\u043a\u0430\u0442\u0430: {addon} ?",
|
||||
"title": "AdGuard Home \u0447\u0440\u0435\u0437 Hass.io \u0434\u043e\u0431\u0430\u0432\u043a\u0430"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "\u0410\u0434\u0440\u0435\u0441",
|
||||
"password": "\u041f\u0430\u0440\u043e\u043b\u0430",
|
||||
"port": "\u041f\u043e\u0440\u0442",
|
||||
"ssl": "AdGuard Home \u0438\u0437\u043f\u043e\u043b\u0437\u0432\u0430 SSL \u0441\u0435\u0440\u0442\u0438\u0444\u0438\u043a\u0430\u0442",
|
||||
"username": "\u041f\u043e\u0442\u0440\u0435\u0431\u0438\u0442\u0435\u043b\u0441\u043a\u043e \u0438\u043c\u0435",
|
||||
"verify_ssl": "AdGuard Home \u0438\u0437\u043f\u043e\u043b\u0437\u0432\u0430 \u043d\u0430\u0434\u0435\u0436\u0434\u0435\u043d \u0441\u0435\u0440\u0442\u0438\u0444\u0438\u043a\u0430\u0442"
|
||||
},
|
||||
"description": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u0442\u0435 \u0412\u0430\u0448\u0438\u044f AdGuard Home, \u0437\u0430 \u0434\u0430 \u043f\u043e\u0437\u0432\u043e\u043b\u0438\u0442\u0435 \u043d\u0430\u0431\u043b\u044e\u0434\u0435\u043d\u0438\u0435 \u0438 \u043a\u043e\u043d\u0442\u0440\u043e\u043b.",
|
||||
"title": "\u0421\u0432\u044a\u0440\u0436\u0435\u0442\u0435 \u0412\u0430\u0448\u0438\u044f AdGuard Home."
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/ca.json
Normal file
30
homeassistant/components/adguard/.translations/ca.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "S'ha actualitzat la configuraci\u00f3 existent.",
|
||||
"single_instance_allowed": "Nom\u00e9s es permet una \u00fanica configuraci\u00f3 d'AdGuard Home."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "No s'ha pogut connectar."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "Vols configurar Home Assistant perqu\u00e8 es connecti amb l'AdGuard Home proporcionat pel complement de Hass.io: {addon}?",
|
||||
"title": "AdGuard Home (complement de Hass.io)"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "Amfitri\u00f3",
|
||||
"password": "Contrasenya",
|
||||
"port": "Port",
|
||||
"ssl": "AdGuard Home utilitza un certificat SSL",
|
||||
"username": "Nom d'usuari",
|
||||
"verify_ssl": "AdGuard Home utilitza un certificat adequat"
|
||||
},
|
||||
"description": "Configuraci\u00f3 de la inst\u00e0ncia d'AdGuard Home, permet el control i la monitoritzaci\u00f3.",
|
||||
"title": "Enlla\u00e7ar AdGuard Home."
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/da.json
Normal file
30
homeassistant/components/adguard/.translations/da.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "Opdaterede eksisterende konfiguration.",
|
||||
"single_instance_allowed": "Det er kun n\u00f8dvendigt med en ops\u00e6tning af AdGuard Home."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Forbindelse mislykkedes."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "Vil du konfigurere Home Assistant til at oprette forbindelse til Adguard Home, der leveres af Hass.io add-on: {addon}?",
|
||||
"title": "AdGuard Home via Hass.io add-on"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "V\u00e6rt",
|
||||
"password": "Adgangskode",
|
||||
"port": "Port",
|
||||
"ssl": "AdGuard Home bruger et SSL-certifikat",
|
||||
"username": "Brugernavn",
|
||||
"verify_ssl": "AdGuard Home bruger et korrekt certifikat"
|
||||
},
|
||||
"description": "Konfigurer din AdGuard Home instans for at tillade overv\u00e5gning og kontrol.",
|
||||
"title": "Link AdGuard Home."
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/de.json
Normal file
30
homeassistant/components/adguard/.translations/de.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "Bestehende Konfiguration wurde aktualisiert.",
|
||||
"single_instance_allowed": "Es ist nur eine einzige Konfiguration von AdGuard Home zul\u00e4ssig."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Fehler beim Herstellen einer Verbindung."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "M\u00f6chtest du Home Assistant so konfigurieren, dass eine Verbindung mit AdGuard Home als Hass.io-Add-On hergestellt wird: {addon}?",
|
||||
"title": "AdGuard Home \u00fcber das Hass.io Add-on"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "Host",
|
||||
"password": "Passwort",
|
||||
"port": "Port",
|
||||
"ssl": "AdGuard Home verwendet ein SSL-Zertifikat",
|
||||
"username": "Benutzername",
|
||||
"verify_ssl": "AdGuard Home verwendet ein richtiges Zertifikat"
|
||||
},
|
||||
"description": "Richte deine AdGuard Home-Instanz ein um sie zu \u00dcberwachen und zu Steuern.",
|
||||
"title": "Verkn\u00fcpfe AdGuard Home."
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/en.json
Normal file
30
homeassistant/components/adguard/.translations/en.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "Updated existing configuration.",
|
||||
"single_instance_allowed": "Only a single configuration of AdGuard Home is allowed."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Failed to connect."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "Do you want to configure Home Assistant to connect to the AdGuard Home provided by the Hass.io add-on: {addon}?",
|
||||
"title": "AdGuard Home via Hass.io add-on"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "Host",
|
||||
"password": "Password",
|
||||
"port": "Port",
|
||||
"ssl": "AdGuard Home uses a SSL certificate",
|
||||
"username": "Username",
|
||||
"verify_ssl": "AdGuard Home uses a proper certificate"
|
||||
},
|
||||
"description": "Set up your AdGuard Home instance to allow monitoring and control.",
|
||||
"title": "Link your AdGuard Home."
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
28
homeassistant/components/adguard/.translations/es-419.json
Normal file
28
homeassistant/components/adguard/.translations/es-419.json
Normal file
@@ -0,0 +1,28 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "Se actualiz\u00f3 la configuraci\u00f3n existente.",
|
||||
"single_instance_allowed": "Solo se permite una \u00fanica configuraci\u00f3n de AdGuard Home."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Error al conectar."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "\u00bfDesea configurar Home Assistant para conectarse a la p\u00e1gina principal de AdGuard proporcionada por el complemento Hass.io: {addon}?",
|
||||
"title": "AdGuard Home a trav\u00e9s del complemento Hass.io"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"password": "Contrase\u00f1a",
|
||||
"port": "Puerto",
|
||||
"ssl": "AdGuard Home utiliza un certificado SSL",
|
||||
"username": "Nombre de usuario",
|
||||
"verify_ssl": "AdGuard Home utiliza un certificado adecuado"
|
||||
},
|
||||
"description": "Configure su instancia de AdGuard Home para permitir la supervisi\u00f3n y el control."
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
7
homeassistant/components/adguard/.translations/es.json
Normal file
7
homeassistant/components/adguard/.translations/es.json
Normal file
@@ -0,0 +1,7 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "Se ha actualizado la configuraci\u00f3n existente."
|
||||
}
|
||||
}
|
||||
}
|
||||
24
homeassistant/components/adguard/.translations/fr.json
Normal file
24
homeassistant/components/adguard/.translations/fr.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "La configuration existante a \u00e9t\u00e9 mise \u00e0 jour."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "\u00c9chec de connexion."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"title": "AdGuard Home via le module compl\u00e9mentaire Hass.io"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "H\u00f4te",
|
||||
"password": "Mot de passe",
|
||||
"port": "Port",
|
||||
"ssl": "AdGuard Home utilise un certificat SSL",
|
||||
"username": "Nom d'utilisateur"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
21
homeassistant/components/adguard/.translations/it.json
Normal file
21
homeassistant/components/adguard/.translations/it.json
Normal file
@@ -0,0 +1,21 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"single_instance_allowed": "\u00c8 consentita solo una singola configurazione di AdGuard Home."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Impossibile connettersi."
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "Host",
|
||||
"password": "Password",
|
||||
"port": "Porta",
|
||||
"ssl": "AdGuard Home utilizza un certificato SSL",
|
||||
"username": "Nome utente"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/ko.json
Normal file
30
homeassistant/components/adguard/.translations/ko.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "\uae30\uc874 \uad6c\uc131\uc744 \uc5c5\ub370\uc774\ud2b8\ud588\uc2b5\ub2c8\ub2e4.",
|
||||
"single_instance_allowed": "\ud558\ub098\uc758 AdGuard Home \ub9cc \uad6c\uc131 \ud560 \uc218 \uc788\uc2b5\ub2c8\ub2e4."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "\uc5f0\uacb0\ud558\uc9c0 \ubabb\ud588\uc2b5\ub2c8\ub2e4."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "Hass.io {addon} \uc560\ub4dc\uc628\uc73c\ub85c AdGuard Home \uc5d0 \uc5f0\uacb0\ud558\ub3c4\ub85d Home Assistant \ub97c \uad6c\uc131 \ud558\uc2dc\uaca0\uc2b5\ub2c8\uae4c?",
|
||||
"title": "Hass.io \uc560\ub4dc\uc628\uc758 AdGuard Home"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "\ud638\uc2a4\ud2b8",
|
||||
"password": "\ube44\ubc00\ubc88\ud638",
|
||||
"port": "\ud3ec\ud2b8",
|
||||
"ssl": "AdGuard Home \uc740 SSL \uc778\uc99d\uc11c\ub97c \uc0ac\uc6a9\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4",
|
||||
"username": "\uc0ac\uc6a9\uc790 \uc774\ub984",
|
||||
"verify_ssl": "AdGuard Home \uc740 \uc62c\ubc14\ub978 \uc778\uc99d\uc11c\ub97c \uc0ac\uc6a9\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4"
|
||||
},
|
||||
"description": "\ubaa8\ub2c8\ud130\ub9c1 \ubc0f \uc81c\uc5b4\uac00 \uac00\ub2a5\ud558\ub3c4\ub85d AdGuard Home \uc778\uc2a4\ud134\uc2a4\ub97c \uc124\uc815\ud574\uc8fc\uc138\uc694.",
|
||||
"title": "AdGuard Home \uc5f0\uacb0"
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/lb.json
Normal file
30
homeassistant/components/adguard/.translations/lb.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "D\u00e9i bestehend Konfiguratioun ass ge\u00e4nnert.",
|
||||
"single_instance_allowed": "N\u00ebmmen eng eenzeg Konfiguratioun vun AdGuard Home ass erlaabt."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Feeler beim verbannen."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "W\u00ebllt dir Home Assistant konfigur\u00e9iere fir sech mam AdGuard Home ze verbannen dee vum hass.io add-on {addon} bereet gestallt g\u00ebtt?",
|
||||
"title": "AdGuard Home via Hass.io add-on"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "Apparat",
|
||||
"password": "Passwuert",
|
||||
"port": "Port",
|
||||
"ssl": "AdGuard Home benotzt een SSL Zertifikat",
|
||||
"username": "Benotzernumm",
|
||||
"verify_ssl": "AdGuard Home benotzt een eegenen Zertifikat"
|
||||
},
|
||||
"description": "Konfigur\u00e9iert \u00e4r AdGuard Home Instanz fir d'Iwwerwaachung an d'Kontroll z'erlaben.",
|
||||
"title": "Verbannt \u00e4ren AdGuard Home"
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/nl.json
Normal file
30
homeassistant/components/adguard/.translations/nl.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "Bestaande configuratie bijgewerkt.",
|
||||
"single_instance_allowed": "Slechts \u00e9\u00e9n configuratie van AdGuard Home is toegestaan."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Kon niet verbinden."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "Wilt u Home Assistant configureren om verbinding te maken met AdGuard Home van de Hass.io-add-on: {addon}?",
|
||||
"title": "AdGuard Home via Hass.io add-on"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "Host",
|
||||
"password": "Wachtwoord",
|
||||
"port": "Poort",
|
||||
"ssl": "AdGuard Home maakt gebruik van een SSL certificaat",
|
||||
"username": "Gebruikersnaam",
|
||||
"verify_ssl": "AdGuard Home maakt gebruik van een goed certificaat"
|
||||
},
|
||||
"description": "Stel uw AdGuard Home-instantie in om toezicht en controle mogelijk te maken.",
|
||||
"title": "Link uw AdGuard Home."
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/no.json
Normal file
30
homeassistant/components/adguard/.translations/no.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "Oppdatert eksisterende konfigurasjon.",
|
||||
"single_instance_allowed": "Kun \u00e9n enkelt konfigurasjon av AdGuard Hjemer tillatt."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Tilkobling mislyktes."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "Vil du konfigurere Home Assistant til \u00e5 koble til AdGuard Hjem gitt av hass.io tillegget {addon}?",
|
||||
"title": "AdGuard Hjem via Hass.io tillegg"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "Vert",
|
||||
"password": "Passord",
|
||||
"port": "Port",
|
||||
"ssl": "AdGuard Hjem bruker et SSL-sertifikat",
|
||||
"username": "Brukernavn",
|
||||
"verify_ssl": "AdGuard Home bruker et riktig sertifikat"
|
||||
},
|
||||
"description": "Sett opp din AdGuard Hjem instans for \u00e5 tillate overv\u00e5king og kontroll.",
|
||||
"title": "Koble til ditt AdGuard Hjem."
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Hjem"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/pl.json
Normal file
30
homeassistant/components/adguard/.translations/pl.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "Zaktualizowano istniej\u0105c\u0105 konfiguracj\u0119.",
|
||||
"single_instance_allowed": "Dozwolona jest tylko jedna konfiguracja AdGuard Home."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Po\u0142\u0105czenie nieudane."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "Czy chcesz skonfigurowa\u0107 Home Assistant'a, aby po\u0142\u0105czy\u0142 si\u0119 z AdGuard Home przez dodatek Hass.io {addon}?",
|
||||
"title": "AdGuard Home przez dodatek Hass.io"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "Host",
|
||||
"password": "Has\u0142o",
|
||||
"port": "Port",
|
||||
"ssl": "AdGuard Home u\u017cywa certyfikatu SSL",
|
||||
"username": "Nazwa u\u017cytkownika",
|
||||
"verify_ssl": "AdGuard Home u\u017cywa odpowiedniego certyfikatu."
|
||||
},
|
||||
"description": "Skonfiguruj swoj\u0105 instancj\u0119 AdGuard Home, aby umo\u017cliwi\u0107 monitorowanie i nadz\u00f3r sieci.",
|
||||
"title": "Po\u0142\u0105cz sw\u00f3j AdGuard Home"
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/pt-BR.json
Normal file
30
homeassistant/components/adguard/.translations/pt-BR.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "Configura\u00e7\u00e3o existente atualizada.",
|
||||
"single_instance_allowed": "Apenas uma \u00fanica configura\u00e7\u00e3o do AdGuard Home \u00e9 permitida."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Falhou ao conectar."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "Deseja configurar o Home Assistant para se conectar ao AdGuard Home fornecido pelo complemento Hass.io: {addon} ?",
|
||||
"title": "AdGuard Home via add-on Hass.io"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "Host",
|
||||
"password": "Senha",
|
||||
"port": "Porta",
|
||||
"ssl": "O AdGuard Home usa um certificado SSL",
|
||||
"username": "Nome de usu\u00e1rio",
|
||||
"verify_ssl": "O AdGuard Home usa um certificado apropriado"
|
||||
},
|
||||
"description": "Configure sua inst\u00e2ncia do AdGuard Home para permitir o monitoramento e o controle.",
|
||||
"title": "Vincule o seu AdGuard Home."
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/ru.json
Normal file
30
homeassistant/components/adguard/.translations/ru.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "\u041a\u043e\u043d\u0444\u0438\u0433\u0443\u0440\u0430\u0446\u0438\u044f \u043e\u0431\u043d\u043e\u0432\u043b\u0435\u043d\u0430.",
|
||||
"single_instance_allowed": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u043a\u0430 \u043a\u043e\u043c\u043f\u043e\u043d\u0435\u043d\u0442\u0430 \u0443\u0436\u0435 \u0432\u044b\u043f\u043e\u043b\u043d\u0435\u043d\u0430."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "\u041e\u0448\u0438\u0431\u043a\u0430 \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u044f."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "\u0412\u044b \u0443\u0432\u0435\u0440\u0435\u043d\u044b, \u0447\u0442\u043e \u0445\u043e\u0442\u0438\u0442\u0435 \u043d\u0430\u0441\u0442\u0440\u043e\u0438\u0442\u044c \u043f\u043e\u0434\u043a\u043b\u044e\u0447\u0435\u043d\u0438\u0435 \u043a AdGuard Home (\u0440\u0430\u0441\u0448\u0438\u0440\u0435\u043d\u0438\u0435 \u0434\u043b\u044f Hass.io \"{addon}\")?",
|
||||
"title": "AdGuard Home (\u0440\u0430\u0441\u0448\u0438\u0440\u0435\u043d\u0438\u0435 \u0434\u043b\u044f Hass.io)"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "\u0425\u043e\u0441\u0442",
|
||||
"password": "\u041f\u0430\u0440\u043e\u043b\u044c",
|
||||
"port": "\u041f\u043e\u0440\u0442",
|
||||
"ssl": "AdGuard Home \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u0443\u0435\u0442 \u0441\u0435\u0440\u0442\u0438\u0444\u0438\u043a\u0430\u0442 SSL",
|
||||
"username": "\u041b\u043e\u0433\u0438\u043d",
|
||||
"verify_ssl": "AdGuard Home \u0438\u0441\u043f\u043e\u043b\u044c\u0437\u0443\u0435\u0442 \u0441\u043e\u0431\u0441\u0442\u0432\u0435\u043d\u043d\u044b\u0439 \u0441\u0435\u0440\u0442\u0438\u0444\u0438\u043a\u0430\u0442"
|
||||
},
|
||||
"description": "\u041d\u0430\u0441\u0442\u0440\u043e\u0439\u0442\u0435 \u044d\u0442\u043e\u0442 \u043a\u043e\u043c\u043f\u043e\u043d\u0435\u043d\u0442 \u0434\u043b\u044f \u043c\u043e\u043d\u0438\u0442\u043e\u0440\u0438\u043d\u0433\u0430 \u0438 \u043a\u043e\u043d\u0442\u0440\u043e\u043b\u044f AdGuard Home.",
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/sl.json
Normal file
30
homeassistant/components/adguard/.translations/sl.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "Posodobljena obstoje\u010da konfiguracija.",
|
||||
"single_instance_allowed": "Dovoljena je samo ena konfiguracija AdGuard Home."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Povezava ni uspela."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "\u017delite konfigurirati Home Assistant-a za povezavo z AdGuard Home, ki ga ponuja hass.io add-on {addon} ?",
|
||||
"title": "AdGuard Home preko dodatka Hass.io"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "Gostitelj",
|
||||
"password": "Geslo",
|
||||
"port": "Vrata",
|
||||
"ssl": "AdGuard Home uporablja SSL certifikat",
|
||||
"username": "Uporabni\u0161ko ime",
|
||||
"verify_ssl": "AdGuard Home uporablja ustrezen certifikat"
|
||||
},
|
||||
"description": "Nastavite primerek AdGuard Home, da omogo\u010dite spremljanje in nadzor.",
|
||||
"title": "Pove\u017eite svoj AdGuard Home."
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/sv.json
Normal file
30
homeassistant/components/adguard/.translations/sv.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "Uppdaterade existerande konfiguration.",
|
||||
"single_instance_allowed": "Endast en enda konfiguration av AdGuard Home \u00e4r till\u00e5ten."
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Det gick inte att ansluta."
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "Vill du konfigurera Home Assistant f\u00f6r att ansluta till AdGuard Home som tillhandah\u00e5lls av Hass.io Add-on: {addon}?",
|
||||
"title": "AdGuard Home via Hass.io-till\u00e4gget"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "V\u00e4rd",
|
||||
"password": "L\u00f6senord",
|
||||
"port": "Port",
|
||||
"ssl": "AdGuard Home anv\u00e4nder ett SSL-certifikat",
|
||||
"username": "Anv\u00e4ndarnamn",
|
||||
"verify_ssl": "AdGuard Home anv\u00e4nder ett korrekt certifikat"
|
||||
},
|
||||
"description": "St\u00e4ll in din AdGuard Home-instans f\u00f6r att till\u00e5ta \u00f6vervakning och kontroll.",
|
||||
"title": "L\u00e4nka din AdGuard Home."
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
14
homeassistant/components/adguard/.translations/vi.json
Normal file
14
homeassistant/components/adguard/.translations/vi.json
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"config": {
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "\u0110\u1ecba ch\u1ec9",
|
||||
"password": "M\u1eadt kh\u1ea9u",
|
||||
"port": "C\u1ed5ng",
|
||||
"username": "T\u00ean \u0111\u0103ng nh\u1eadp"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
16
homeassistant/components/adguard/.translations/zh-Hans.json
Normal file
16
homeassistant/components/adguard/.translations/zh-Hans.json
Normal file
@@ -0,0 +1,16 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "\u66f4\u65b0\u4e86\u73b0\u6709\u914d\u7f6e\u3002"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"password": "\u5bc6\u7801",
|
||||
"port": "\u7aef\u53e3",
|
||||
"username": "\u7528\u6237\u540d"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
30
homeassistant/components/adguard/.translations/zh-Hant.json
Normal file
30
homeassistant/components/adguard/.translations/zh-Hant.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"existing_instance_updated": "\u5df2\u66f4\u65b0\u73fe\u6709\u8a2d\u5b9a\u3002",
|
||||
"single_instance_allowed": "\u50c5\u5141\u8a31\u8a2d\u5b9a\u4e00\u7d44 AdGuard Home\u3002"
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "\u9023\u7dda\u5931\u6557\u3002"
|
||||
},
|
||||
"step": {
|
||||
"hassio_confirm": {
|
||||
"description": "\u662f\u5426\u8981\u8a2d\u5b9a Home Assistant \u4ee5\u4f7f\u7528 Hass.io \u9644\u52a0\u7d44\u4ef6\uff1a{addon} \u9023\u7dda\u81f3 AdGuard Home\uff1f",
|
||||
"title": "\u4f7f\u7528 Hass.io \u9644\u52a0\u7d44\u4ef6 AdGuard Home"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "\u4e3b\u6a5f\u7aef",
|
||||
"password": "\u5bc6\u78bc",
|
||||
"port": "\u901a\u8a0a\u57e0",
|
||||
"ssl": "AdGuard Home \u4f7f\u7528 SSL \u8a8d\u8b49",
|
||||
"username": "\u4f7f\u7528\u8005\u540d\u7a31",
|
||||
"verify_ssl": "AdGuard Home \u4f7f\u7528\u5c0d\u61c9\u8a8d\u8b49"
|
||||
},
|
||||
"description": "\u8a2d\u5b9a AdGuard Home \u4ee5\u9032\u884c\u76e3\u63a7\u3002",
|
||||
"title": "\u9023\u7d50 AdGuard Home\u3002"
|
||||
}
|
||||
},
|
||||
"title": "AdGuard Home"
|
||||
}
|
||||
}
|
||||
185
homeassistant/components/adguard/__init__.py
Normal file
185
homeassistant/components/adguard/__init__.py
Normal file
@@ -0,0 +1,185 @@
|
||||
"""Support for AdGuard Home."""
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from adguardhome import AdGuardHome, AdGuardHomeError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.adguard.const import (
|
||||
CONF_FORCE,
|
||||
DATA_ADGUARD_CLIENT,
|
||||
DATA_ADGUARD_VERION,
|
||||
DOMAIN,
|
||||
SERVICE_ADD_URL,
|
||||
SERVICE_DISABLE_URL,
|
||||
SERVICE_ENABLE_URL,
|
||||
SERVICE_REFRESH,
|
||||
SERVICE_REMOVE_URL,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_NAME,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_SSL,
|
||||
CONF_URL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.typing import ConfigType, HomeAssistantType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Validation schemas for the filter-subscription services.
SERVICE_URL_SCHEMA = vol.Schema({vol.Required(CONF_URL): cv.url})
SERVICE_ADD_URL_SCHEMA = vol.Schema(
    {vol.Required(CONF_NAME): cv.string, vol.Required(CONF_URL): cv.url}
)
# "force" defaults to False: a regular (throttled) refresh.
SERVICE_REFRESH_SCHEMA = vol.Schema(
    {vol.Optional(CONF_FORCE, default=False): cv.boolean}
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistantType, config: ConfigType) -> bool:
    """Set up the AdGuard Home components.

    Nothing to do here: the integration is configured via config entries
    (see async_setup_entry), not via YAML.
    """
    return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistantType, entry: ConfigEntry) -> bool:
    """Set up AdGuard Home from a config entry.

    Creates the AdGuard Home API client, forwards setup to the sensor and
    switch platforms, and registers the filter-subscription services.
    """
    websession = async_get_clientsession(hass, entry.data[CONF_VERIFY_SSL])
    adguard = AdGuardHome(
        entry.data[CONF_HOST],
        port=entry.data[CONF_PORT],
        username=entry.data[CONF_USERNAME],
        password=entry.data[CONF_PASSWORD],
        tls=entry.data[CONF_SSL],
        verify_ssl=entry.data[CONF_VERIFY_SSL],
        loop=hass.loop,
        session=websession,
    )

    # Shared client; platforms look it up under DATA_ADGUARD_CLIENT.
    hass.data.setdefault(DOMAIN, {})[DATA_ADGUARD_CLIENT] = adguard

    for platform in ("sensor", "switch"):
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(entry, platform)
        )

    async def add_url(call) -> None:
        """Service call to add a new filter subscription to AdGuard Home."""
        await adguard.filtering.add_url(
            call.data.get(CONF_NAME), call.data.get(CONF_URL)
        )

    async def remove_url(call) -> None:
        """Service call to remove a filter subscription from AdGuard Home."""
        await adguard.filtering.remove_url(call.data.get(CONF_URL))

    async def enable_url(call) -> None:
        """Service call to enable a filter subscription in AdGuard Home."""
        await adguard.filtering.enable_url(call.data.get(CONF_URL))

    async def disable_url(call) -> None:
        """Service call to disable a filter subscription in AdGuard Home."""
        await adguard.filtering.disable_url(call.data.get(CONF_URL))

    async def refresh(call) -> None:
        """Service call to refresh the filter subscriptions in AdGuard Home."""
        await adguard.filtering.refresh(call.data.get(CONF_FORCE))

    # Register all services in one pass; removed again in async_unload_entry.
    for service, handler, schema in (
        (SERVICE_ADD_URL, add_url, SERVICE_ADD_URL_SCHEMA),
        (SERVICE_REMOVE_URL, remove_url, SERVICE_URL_SCHEMA),
        (SERVICE_ENABLE_URL, enable_url, SERVICE_URL_SCHEMA),
        (SERVICE_DISABLE_URL, disable_url, SERVICE_URL_SCHEMA),
        (SERVICE_REFRESH, refresh, SERVICE_REFRESH_SCHEMA),
    ):
        hass.services.async_register(DOMAIN, service, handler, schema=schema)

    return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistantType, entry: ConfigType) -> bool:
    """Unload AdGuard Home config entry.

    Mirror image of async_setup_entry: deregister services, unload the
    sensor and switch platforms, and drop the shared client data.
    """
    for service in (
        SERVICE_ADD_URL,
        SERVICE_REMOVE_URL,
        SERVICE_ENABLE_URL,
        SERVICE_DISABLE_URL,
        SERVICE_REFRESH,
    ):
        hass.services.async_remove(DOMAIN, service)

    for platform in ("sensor", "switch"):
        await hass.config_entries.async_forward_entry_unload(entry, platform)

    del hass.data[DOMAIN]

    return True
|
||||
|
||||
|
||||
class AdGuardHomeEntity(Entity):
    """Defines a base AdGuard Home entity.

    Subclasses implement _adguard_update(); availability is tracked from
    whether that call succeeds.
    """

    def __init__(self, adguard, name: str, icon: str) -> None:
        """Initialize the AdGuard Home entity."""
        self.adguard = adguard
        self._available = True
        self._icon = icon
        self._name = name

    @property
    def name(self) -> str:
        """Return the name of the entity."""
        return self._name

    @property
    def icon(self) -> str:
        """Return the mdi icon of the entity."""
        return self._icon

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return self._available

    async def async_update(self) -> None:
        """Update AdGuard Home entity."""
        try:
            await self._adguard_update()
        except AdGuardHomeError:
            # Log only on the available -> unavailable transition to avoid
            # flooding the log while the instance stays unreachable.
            if self._available:
                _LOGGER.debug(
                    "An error occurred while updating AdGuard Home sensor.",
                    exc_info=True,
                )
            self._available = False
        else:
            self._available = True

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        raise NotImplementedError()
|
||||
|
||||
|
||||
class AdGuardHomeDeviceEntity(AdGuardHomeEntity):
    """Defines a AdGuard Home device entity."""

    @property
    def device_info(self) -> Dict[str, Any]:
        """Return device information about this AdGuard Home instance."""
        # Host, port, and base path together uniquely identify the instance.
        identifier = (
            DOMAIN,
            self.adguard.host,
            self.adguard.port,
            self.adguard.base_path,
        )
        return {
            "identifiers": {identifier},
            "name": "AdGuard Home",
            "manufacturer": "AdGuard Team",
            # Version is stored by the platform setup; may be absent early on.
            "sw_version": self.hass.data[DOMAIN].get(DATA_ADGUARD_VERION),
        }
|
||||
174
homeassistant/components/adguard/config_flow.py
Normal file
174
homeassistant/components/adguard/config_flow.py
Normal file
@@ -0,0 +1,174 @@
|
||||
"""Config flow to configure the AdGuard Home integration."""
|
||||
import logging
|
||||
|
||||
from adguardhome import AdGuardHome, AdGuardHomeConnectionError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components.adguard.const import DOMAIN
|
||||
from homeassistant.config_entries import ConfigFlow
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_PASSWORD,
|
||||
CONF_PORT,
|
||||
CONF_SSL,
|
||||
CONF_USERNAME,
|
||||
CONF_VERIFY_SSL,
|
||||
)
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@config_entries.HANDLERS.register(DOMAIN)
class AdGuardHomeFlowHandler(ConfigFlow):
    """Handle a AdGuard Home config flow.

    Supports manual user setup and discovery via the Hass.io add-on.
    """

    VERSION = 1
    CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL

    # Discovery payload from Hass.io, stored until the user confirms.
    _hassio_discovery = None

    # NOTE: the original no-op __init__ (containing only ``pass``) has been
    # removed; the base class constructor suffices.

    async def _show_setup_form(self, errors=None):
        """Show the setup form to the user."""
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_HOST): str,
                    vol.Required(CONF_PORT, default=3000): vol.Coerce(int),
                    vol.Optional(CONF_USERNAME): str,
                    vol.Optional(CONF_PASSWORD): str,
                    vol.Required(CONF_SSL, default=True): bool,
                    vol.Required(CONF_VERIFY_SSL, default=True): bool,
                }
            ),
            errors=errors or {},
        )

    async def _show_hassio_form(self, errors=None):
        """Show the Hass.io confirmation form to the user."""
        return self.async_show_form(
            step_id="hassio_confirm",
            description_placeholders={"addon": self._hassio_discovery["addon"]},
            data_schema=vol.Schema({}),
            errors=errors or {},
        )

    async def async_step_user(self, user_input=None):
        """Handle a flow initiated by the user."""
        if self._async_current_entries():
            return self.async_abort(reason="single_instance_allowed")

        if user_input is None:
            return await self._show_setup_form(user_input)

        errors = {}

        session = async_get_clientsession(self.hass, user_input[CONF_VERIFY_SSL])

        adguard = AdGuardHome(
            user_input[CONF_HOST],
            port=user_input[CONF_PORT],
            username=user_input.get(CONF_USERNAME),
            password=user_input.get(CONF_PASSWORD),
            tls=user_input[CONF_SSL],
            verify_ssl=user_input[CONF_VERIFY_SSL],
            loop=self.hass.loop,
            session=session,
        )

        # Probe the instance before creating the entry.
        try:
            await adguard.version()
        except AdGuardHomeConnectionError:
            errors["base"] = "connection_error"
            return await self._show_setup_form(errors)

        return self.async_create_entry(
            title=user_input[CONF_HOST],
            data={
                CONF_HOST: user_input[CONF_HOST],
                CONF_PASSWORD: user_input.get(CONF_PASSWORD),
                CONF_PORT: user_input[CONF_PORT],
                CONF_SSL: user_input[CONF_SSL],
                CONF_USERNAME: user_input.get(CONF_USERNAME),
                CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
            },
        )

    async def async_step_hassio(self, user_input=None):
        """Prepare configuration for a Hass.io AdGuard Home add-on.

        This flow is triggered by the discovery component.
        """
        entries = self._async_current_entries()

        # No existing entry: ask the user to confirm the discovery.
        if not entries:
            self._hassio_discovery = user_input
            return await self.async_step_hassio_confirm()

        cur_entry = entries[0]

        # Same host/port as the existing entry: nothing to update.
        if (
            cur_entry.data[CONF_HOST] == user_input[CONF_HOST]
            and cur_entry.data[CONF_PORT] == user_input[CONF_PORT]
        ):
            return self.async_abort(reason="single_instance_allowed")

        # Unload before rewriting the entry data, reload afterwards.
        is_loaded = cur_entry.state == config_entries.ENTRY_STATE_LOADED

        if is_loaded:
            await self.hass.config_entries.async_unload(cur_entry.entry_id)

        self.hass.config_entries.async_update_entry(
            cur_entry,
            data={
                **cur_entry.data,
                CONF_HOST: user_input[CONF_HOST],
                CONF_PORT: user_input[CONF_PORT],
            },
        )

        if is_loaded:
            await self.hass.config_entries.async_setup(cur_entry.entry_id)

        return self.async_abort(reason="existing_instance_updated")

    async def async_step_hassio_confirm(self, user_input=None):
        """Confirm Hass.io discovery."""
        if user_input is None:
            return await self._show_hassio_form()

        errors = {}

        # The add-on is reached without TLS or authentication.
        session = async_get_clientsession(self.hass, False)

        adguard = AdGuardHome(
            self._hassio_discovery[CONF_HOST],
            port=self._hassio_discovery[CONF_PORT],
            tls=False,
            loop=self.hass.loop,
            session=session,
        )

        try:
            await adguard.version()
        except AdGuardHomeConnectionError:
            errors["base"] = "connection_error"
            return await self._show_hassio_form(errors)

        return self.async_create_entry(
            title=self._hassio_discovery["addon"],
            data={
                CONF_HOST: self._hassio_discovery[CONF_HOST],
                CONF_PORT: self._hassio_discovery[CONF_PORT],
                CONF_PASSWORD: None,
                CONF_SSL: False,
                CONF_USERNAME: None,
                CONF_VERIFY_SSL: True,
            },
        )
|
||||
14
homeassistant/components/adguard/const.py
Normal file
14
homeassistant/components/adguard/const.py
Normal file
@@ -0,0 +1,14 @@
|
||||
"""Constants for the AdGuard Home integration."""
|
||||
|
||||
DOMAIN = "adguard"

# Keys into hass.data[DOMAIN].
DATA_ADGUARD_CLIENT = "adguard_client"
# NOTE(review): "VERION" is a historical misspelling of "version"; the name
# is referenced across sensor.py and switch.py, so it is kept for compatibility.
DATA_ADGUARD_VERION = "adguard_version"

# Service-call parameter for the refresh service.
CONF_FORCE = "force"

# Service names registered by the integration.
SERVICE_ADD_URL = "add_url"
SERVICE_DISABLE_URL = "disable_url"
SERVICE_ENABLE_URL = "enable_url"
SERVICE_REFRESH = "refresh"
SERVICE_REMOVE_URL = "remove_url"
|
||||
13
homeassistant/components/adguard/manifest.json
Normal file
13
homeassistant/components/adguard/manifest.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"domain": "adguard",
|
||||
"name": "AdGuard Home",
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/components/adguard",
|
||||
"requirements": [
|
||||
"adguardhome==0.2.1"
|
||||
],
|
||||
"dependencies": [],
|
||||
"codeowners": [
|
||||
"@frenck"
|
||||
]
|
||||
}
|
||||
222
homeassistant/components/adguard/sensor.py
Normal file
222
homeassistant/components/adguard/sensor.py
Normal file
@@ -0,0 +1,222 @@
|
||||
"""Support for AdGuard Home sensors."""
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from adguardhome import AdGuardHomeConnectionError
|
||||
|
||||
from homeassistant.components.adguard import AdGuardHomeDeviceEntity
|
||||
from homeassistant.components.adguard.const import (
|
||||
DATA_ADGUARD_CLIENT,
|
||||
DATA_ADGUARD_VERION,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Statistics change slowly; poll every five minutes.
SCAN_INTERVAL = timedelta(seconds=300)
# Limit concurrent requests against the AdGuard Home API.
PARALLEL_UPDATES = 4
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
    """Set up AdGuard Home sensor based on a config entry."""
    adguard = hass.data[DOMAIN][DATA_ADGUARD_CLIENT]

    # Fetch the version up front; failure means the instance is unreachable
    # and the platform should be retried later.
    try:
        version = await adguard.version()
    except AdGuardHomeConnectionError as exception:
        raise PlatformNotReady from exception

    hass.data[DOMAIN][DATA_ADGUARD_VERION] = version

    entities = [
        sensor_cls(adguard)
        for sensor_cls in (
            AdGuardHomeDNSQueriesSensor,
            AdGuardHomeBlockedFilteringSensor,
            AdGuardHomePercentageBlockedSensor,
            AdGuardHomeReplacedParentalSensor,
            AdGuardHomeReplacedSafeBrowsingSensor,
            AdGuardHomeReplacedSafeSearchSensor,
            AdGuardHomeAverageProcessingTimeSensor,
            AdGuardHomeRulesCountSensor,
        )
    ]

    async_add_entities(entities, True)
|
||||
|
||||
|
||||
class AdGuardHomeSensor(AdGuardHomeDeviceEntity):
    """Defines a AdGuard Home sensor."""

    def __init__(
        self, adguard, name: str, icon: str, measurement: str, unit_of_measurement: str
    ) -> None:
        """Initialize AdGuard Home sensor."""
        self.measurement = measurement
        self._state = None
        self._unit_of_measurement = unit_of_measurement
        super().__init__(adguard, name, icon)

    @property
    def unique_id(self) -> str:
        """Return the unique ID for this sensor."""
        # Same format as "_".join([DOMAIN, host, str(port), "sensor", measurement]).
        return (
            f"{DOMAIN}_{self.adguard.host}_{self.adguard.port}"
            f"_sensor_{self.measurement}"
        )

    @property
    def state(self):
        """Return the state of the sensor."""
        return self._state

    @property
    def unit_of_measurement(self) -> str:
        """Return the unit this state is expressed in."""
        return self._unit_of_measurement
|
||||
|
||||
|
||||
class AdGuardHomeDNSQueriesSensor(AdGuardHomeSensor):
    """Defines a AdGuard Home DNS Queries sensor."""

    def __init__(self, adguard):
        """Initialize AdGuard Home sensor."""
        super().__init__(
            adguard,
            name="AdGuard DNS Queries",
            icon="mdi:magnify",
            measurement="dns_queries",
            unit_of_measurement="queries",
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.stats.dns_queries()
|
||||
|
||||
|
||||
class AdGuardHomeBlockedFilteringSensor(AdGuardHomeSensor):
    """Defines a AdGuard Home blocked by filtering sensor."""

    def __init__(self, adguard):
        """Initialize AdGuard Home sensor."""
        super().__init__(
            adguard,
            name="AdGuard DNS Queries Blocked",
            icon="mdi:magnify-close",
            measurement="blocked_filtering",
            unit_of_measurement="queries",
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.stats.blocked_filtering()
|
||||
|
||||
|
||||
class AdGuardHomePercentageBlockedSensor(AdGuardHomeSensor):
    """Defines a AdGuard Home blocked percentage sensor."""

    def __init__(self, adguard):
        """Initialize AdGuard Home sensor."""
        super().__init__(
            adguard,
            name="AdGuard DNS Queries Blocked Ratio",
            icon="mdi:magnify-close",
            measurement="blocked_percentage",
            unit_of_measurement="%",
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        ratio = await self.adguard.stats.blocked_percentage()
        # Render with two decimals, matching "{:.2f}".format(ratio).
        self._state = f"{ratio:.2f}"
|
||||
|
||||
|
||||
class AdGuardHomeReplacedParentalSensor(AdGuardHomeSensor):
    """Defines a AdGuard Home replaced by parental control sensor."""

    def __init__(self, adguard):
        """Initialize AdGuard Home sensor."""
        super().__init__(
            adguard,
            name="AdGuard Parental Control Blocked",
            icon="mdi:human-male-girl",
            measurement="blocked_parental",
            unit_of_measurement="requests",
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.stats.replaced_parental()
|
||||
|
||||
|
||||
class AdGuardHomeReplacedSafeBrowsingSensor(AdGuardHomeSensor):
    """Defines a AdGuard Home replaced by safe browsing sensor."""

    def __init__(self, adguard):
        """Initialize AdGuard Home sensor."""
        super().__init__(
            adguard,
            name="AdGuard Safe Browsing Blocked",
            icon="mdi:shield-half-full",
            measurement="blocked_safebrowsing",
            unit_of_measurement="requests",
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.stats.replaced_safebrowsing()
|
||||
|
||||
|
||||
class AdGuardHomeReplacedSafeSearchSensor(AdGuardHomeSensor):
    """Defines a AdGuard Home replaced by safe search sensor."""

    def __init__(self, adguard):
        """Initialize AdGuard Home sensor."""
        super().__init__(
            adguard,
            name="Searches Safe Search Enforced",
            icon="mdi:shield-search",
            measurement="enforced_safesearch",
            unit_of_measurement="requests",
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.stats.replaced_safesearch()
|
||||
|
||||
|
||||
class AdGuardHomeAverageProcessingTimeSensor(AdGuardHomeSensor):
    """Defines a AdGuard Home average processing time sensor."""

    def __init__(self, adguard):
        """Initialize AdGuard Home sensor."""
        super().__init__(
            adguard,
            name="AdGuard Average Processing Speed",
            icon="mdi:speedometer",
            measurement="average_speed",
            unit_of_measurement="ms",
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        speed = await self.adguard.stats.avg_processing_time()
        # Render with two decimals, matching "{:.2f}".format(speed).
        self._state = f"{speed:.2f}"
|
||||
|
||||
|
||||
class AdGuardHomeRulesCountSensor(AdGuardHomeSensor):
    """Defines a AdGuard Home rules count sensor."""

    def __init__(self, adguard):
        """Initialize AdGuard Home sensor."""
        super().__init__(
            adguard,
            name="AdGuard Rules Count",
            icon="mdi:counter",
            measurement="rules_count",
            unit_of_measurement="rules",
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.filtering.rules_count()
|
||||
37
homeassistant/components/adguard/services.yaml
Normal file
37
homeassistant/components/adguard/services.yaml
Normal file
@@ -0,0 +1,37 @@
|
||||
add_url:
|
||||
description: Add a new filter subscription to AdGuard Home.
|
||||
fields:
|
||||
name:
|
||||
description: The name of the filter subscription.
|
||||
example: Example
|
||||
url:
|
||||
description: The filter URL to subscribe to, containing the filter rules.
|
||||
example: https://www.example.com/filter/1.txt
|
||||
|
||||
remove_url:
|
||||
description: Removes a filter subscription from AdGuard Home.
|
||||
fields:
|
||||
url:
|
||||
description: The filter subscription URL to remove.
|
||||
example: https://www.example.com/filter/1.txt
|
||||
|
||||
enable_url:
|
||||
description: Enables a filter subscription in AdGuard Home.
|
||||
fields:
|
||||
url:
|
||||
description: The filter subscription URL to enable.
|
||||
example: https://www.example.com/filter/1.txt
|
||||
|
||||
disable_url:
|
||||
description: Disables a filter subscription in AdGuard Home.
|
||||
fields:
|
||||
url:
|
||||
description: The filter subscription URL to disable.
|
||||
example: https://www.example.com/filter/1.txt
|
||||
|
||||
refresh:
|
||||
description: Refresh all filter subscriptions in AdGuard Home.
|
||||
fields:
|
||||
force:
|
||||
description: Force update (bypasses AdGuard Home throttling).
|
||||
example: '"true" to force, "false" or omit for a regular refresh.'
|
||||
30
homeassistant/components/adguard/strings.json
Normal file
30
homeassistant/components/adguard/strings.json
Normal file
@@ -0,0 +1,30 @@
|
||||
{
|
||||
"config": {
|
||||
"title": "AdGuard Home",
|
||||
"step": {
|
||||
"user": {
|
||||
"title": "Link your AdGuard Home.",
|
||||
"description": "Set up your AdGuard Home instance to allow monitoring and control.",
|
||||
"data": {
|
||||
"host": "Host",
|
||||
"password": "Password",
|
||||
"port": "Port",
|
||||
"username": "Username",
|
||||
"ssl": "AdGuard Home uses a SSL certificate",
|
||||
"verify_ssl": "AdGuard Home uses a proper certificate"
|
||||
}
|
||||
},
|
||||
"hassio_confirm": {
|
||||
"title": "AdGuard Home via Hass.io add-on",
|
||||
"description": "Do you want to configure Home Assistant to connect to the AdGuard Home provided by the Hass.io add-on: {addon}?"
|
||||
}
|
||||
},
|
||||
"error": {
|
||||
"connection_error": "Failed to connect."
|
||||
},
|
||||
"abort": {
|
||||
"single_instance_allowed": "Only a single configuration of AdGuard Home is allowed.",
|
||||
"existing_instance_updated": "Updated existing configuration."
|
||||
}
|
||||
}
|
||||
}
|
||||
219
homeassistant/components/adguard/switch.py
Normal file
219
homeassistant/components/adguard/switch.py
Normal file
@@ -0,0 +1,219 @@
|
||||
"""Support for AdGuard Home switches."""
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from adguardhome import AdGuardHomeConnectionError, AdGuardHomeError
|
||||
|
||||
from homeassistant.components.adguard import AdGuardHomeDeviceEntity
|
||||
from homeassistant.components.adguard.const import (
|
||||
DATA_ADGUARD_CLIENT,
|
||||
DATA_ADGUARD_VERION,
|
||||
DOMAIN,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
from homeassistant.helpers.entity import ToggleEntity
|
||||
from homeassistant.helpers.typing import HomeAssistantType
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Switch state can be toggled externally; poll frequently.
SCAN_INTERVAL = timedelta(seconds=10)
# Serialize requests so toggles and updates do not race each other.
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
async def async_setup_entry(
    hass: HomeAssistantType, entry: ConfigEntry, async_add_entities
) -> None:
    """Set up AdGuard Home switch based on a config entry."""
    adguard = hass.data[DOMAIN][DATA_ADGUARD_CLIENT]

    # Probe the instance; an unreachable server defers platform setup.
    try:
        version = await adguard.version()
    except AdGuardHomeConnectionError as exception:
        raise PlatformNotReady from exception

    hass.data[DOMAIN][DATA_ADGUARD_VERION] = version

    entities = [
        switch_cls(adguard)
        for switch_cls in (
            AdGuardHomeProtectionSwitch,
            AdGuardHomeFilteringSwitch,
            AdGuardHomeParentalSwitch,
            AdGuardHomeSafeBrowsingSwitch,
            AdGuardHomeSafeSearchSwitch,
            AdGuardHomeQueryLogSwitch,
        )
    ]
    async_add_entities(entities, True)
|
||||
|
||||
|
||||
class AdGuardHomeSwitch(ToggleEntity, AdGuardHomeDeviceEntity):
    """Defines a AdGuard Home switch.

    Subclasses implement _adguard_turn_on/_adguard_turn_off against the
    AdGuard Home API.
    """

    def __init__(self, adguard, name: str, icon: str, key: str):
        """Initialize AdGuard Home switch."""
        self._key = key
        self._state = False
        super().__init__(adguard, name, icon)

    @property
    def unique_id(self) -> str:
        """Return the unique ID for this sensor."""
        # Same format as "_".join([DOMAIN, host, str(port), "switch", key]).
        return f"{DOMAIN}_{self.adguard.host}_{self.adguard.port}_switch_{self._key}"

    @property
    def is_on(self) -> bool:
        """Return the state of the switch."""
        return self._state

    async def async_turn_off(self, **kwargs) -> None:
        """Turn off the switch."""
        try:
            await self._adguard_turn_off()
        except AdGuardHomeError:
            _LOGGER.error("An error occurred while turning off AdGuard Home switch.")
            self._available = False

    async def _adguard_turn_off(self) -> None:
        """Turn off the switch."""
        raise NotImplementedError()

    async def async_turn_on(self, **kwargs) -> None:
        """Turn on the switch."""
        try:
            await self._adguard_turn_on()
        except AdGuardHomeError:
            _LOGGER.error("An error occurred while turning on AdGuard Home switch.")
            self._available = False

    async def _adguard_turn_on(self) -> None:
        """Turn on the switch."""
        raise NotImplementedError()
|
||||
|
||||
|
||||
class AdGuardHomeProtectionSwitch(AdGuardHomeSwitch):
    """Defines a AdGuard Home protection switch."""

    def __init__(self, adguard) -> None:
        """Initialize AdGuard Home switch."""
        super().__init__(
            adguard, name="AdGuard Protection", icon="mdi:shield-check", key="protection"
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.protection_enabled()

    async def _adguard_turn_on(self) -> None:
        """Turn on the switch."""
        await self.adguard.enable_protection()

    async def _adguard_turn_off(self) -> None:
        """Turn off the switch."""
        await self.adguard.disable_protection()
|
||||
|
||||
|
||||
class AdGuardHomeParentalSwitch(AdGuardHomeSwitch):
    """Defines a AdGuard Home parental control switch."""

    def __init__(self, adguard) -> None:
        """Initialize AdGuard Home switch."""
        super().__init__(
            adguard, name="AdGuard Parental Control", icon="mdi:shield-check", key="parental"
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.parental.enabled()

    async def _adguard_turn_on(self) -> None:
        """Turn on the switch."""
        await self.adguard.parental.enable()

    async def _adguard_turn_off(self) -> None:
        """Turn off the switch."""
        await self.adguard.parental.disable()
|
||||
|
||||
|
||||
class AdGuardHomeSafeSearchSwitch(AdGuardHomeSwitch):
    """Defines a AdGuard Home safe search switch."""

    def __init__(self, adguard) -> None:
        """Initialize AdGuard Home switch."""
        super().__init__(
            adguard, name="AdGuard Safe Search", icon="mdi:shield-check", key="safesearch"
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.safesearch.enabled()

    async def _adguard_turn_on(self) -> None:
        """Turn on the switch."""
        await self.adguard.safesearch.enable()

    async def _adguard_turn_off(self) -> None:
        """Turn off the switch."""
        await self.adguard.safesearch.disable()
|
||||
|
||||
|
||||
class AdGuardHomeSafeBrowsingSwitch(AdGuardHomeSwitch):
    """Defines a AdGuard Home safe search switch."""

    def __init__(self, adguard) -> None:
        """Initialize AdGuard Home switch."""
        super().__init__(
            adguard, name="AdGuard Safe Browsing", icon="mdi:shield-check", key="safebrowsing"
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.safebrowsing.enabled()

    async def _adguard_turn_on(self) -> None:
        """Turn on the switch."""
        await self.adguard.safebrowsing.enable()

    async def _adguard_turn_off(self) -> None:
        """Turn off the switch."""
        await self.adguard.safebrowsing.disable()
|
||||
|
||||
|
||||
class AdGuardHomeFilteringSwitch(AdGuardHomeSwitch):
    """Defines a AdGuard Home filtering switch."""

    def __init__(self, adguard) -> None:
        """Initialize AdGuard Home switch."""
        super().__init__(
            adguard, name="AdGuard Filtering", icon="mdi:shield-check", key="filtering"
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.filtering.enabled()

    async def _adguard_turn_on(self) -> None:
        """Turn on the switch."""
        await self.adguard.filtering.enable()

    async def _adguard_turn_off(self) -> None:
        """Turn off the switch."""
        await self.adguard.filtering.disable()
|
||||
|
||||
|
||||
class AdGuardHomeQueryLogSwitch(AdGuardHomeSwitch):
    """Defines a AdGuard Home query log switch."""

    def __init__(self, adguard) -> None:
        """Initialize AdGuard Home switch."""
        super().__init__(
            adguard, name="AdGuard Query Log", icon="mdi:shield-check", key="querylog"
        )

    async def _adguard_update(self) -> None:
        """Update AdGuard Home entity."""
        self._state = await self.adguard.querylog.enabled()

    async def _adguard_turn_on(self) -> None:
        """Turn on the switch."""
        await self.adguard.querylog.enable()

    async def _adguard_turn_off(self) -> None:
        """Turn off the switch."""
        await self.adguard.querylog.disable()
|
||||
@@ -10,57 +10,76 @@ import async_timeout
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_DEVICE, CONF_IP_ADDRESS, CONF_PORT, EVENT_HOMEASSISTANT_STOP)
|
||||
CONF_DEVICE,
|
||||
CONF_IP_ADDRESS,
|
||||
CONF_PORT,
|
||||
EVENT_HOMEASSISTANT_STOP,
|
||||
)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DATA_ADS = 'data_ads'
|
||||
DATA_ADS = "data_ads"
|
||||
|
||||
# Supported Types
|
||||
ADSTYPE_BOOL = 'bool'
|
||||
ADSTYPE_BYTE = 'byte'
|
||||
ADSTYPE_DINT = 'dint'
|
||||
ADSTYPE_INT = 'int'
|
||||
ADSTYPE_UDINT = 'udint'
|
||||
ADSTYPE_UINT = 'uint'
|
||||
ADSTYPE_BOOL = "bool"
|
||||
ADSTYPE_BYTE = "byte"
|
||||
ADSTYPE_DINT = "dint"
|
||||
ADSTYPE_INT = "int"
|
||||
ADSTYPE_UDINT = "udint"
|
||||
ADSTYPE_UINT = "uint"
|
||||
|
||||
CONF_ADS_FACTOR = 'factor'
|
||||
CONF_ADS_TYPE = 'adstype'
|
||||
CONF_ADS_VALUE = 'value'
|
||||
CONF_ADS_VAR = 'adsvar'
|
||||
CONF_ADS_VAR_BRIGHTNESS = 'adsvar_brightness'
|
||||
CONF_ADS_VAR_POSITION = 'adsvar_position'
|
||||
CONF_ADS_FACTOR = "factor"
|
||||
CONF_ADS_TYPE = "adstype"
|
||||
CONF_ADS_VALUE = "value"
|
||||
CONF_ADS_VAR = "adsvar"
|
||||
CONF_ADS_VAR_BRIGHTNESS = "adsvar_brightness"
|
||||
CONF_ADS_VAR_POSITION = "adsvar_position"
|
||||
|
||||
STATE_KEY_STATE = 'state'
|
||||
STATE_KEY_BRIGHTNESS = 'brightness'
|
||||
STATE_KEY_POSITION = 'position'
|
||||
STATE_KEY_STATE = "state"
|
||||
STATE_KEY_BRIGHTNESS = "brightness"
|
||||
STATE_KEY_POSITION = "position"
|
||||
|
||||
DOMAIN = 'ads'
|
||||
DOMAIN = "ads"
|
||||
|
||||
SERVICE_WRITE_DATA_BY_NAME = 'write_data_by_name'
|
||||
SERVICE_WRITE_DATA_BY_NAME = "write_data_by_name"
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Required(CONF_DEVICE): cv.string,
|
||||
vol.Required(CONF_PORT): cv.port,
|
||||
vol.Optional(CONF_IP_ADDRESS): cv.string,
|
||||
})
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_DEVICE): cv.string,
|
||||
vol.Required(CONF_PORT): cv.port,
|
||||
vol.Optional(CONF_IP_ADDRESS): cv.string,
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
SCHEMA_SERVICE_WRITE_DATA_BY_NAME = vol.Schema({
|
||||
vol.Required(CONF_ADS_TYPE):
|
||||
vol.In([ADSTYPE_INT, ADSTYPE_UINT, ADSTYPE_BYTE, ADSTYPE_BOOL,
|
||||
ADSTYPE_DINT, ADSTYPE_UDINT]),
|
||||
vol.Required(CONF_ADS_VALUE): vol.Coerce(int),
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
})
|
||||
SCHEMA_SERVICE_WRITE_DATA_BY_NAME = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ADS_TYPE): vol.In(
|
||||
[
|
||||
ADSTYPE_INT,
|
||||
ADSTYPE_UINT,
|
||||
ADSTYPE_BYTE,
|
||||
ADSTYPE_BOOL,
|
||||
ADSTYPE_DINT,
|
||||
ADSTYPE_UDINT,
|
||||
]
|
||||
),
|
||||
vol.Required(CONF_ADS_VALUE): vol.Coerce(int),
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup(hass, config):
|
||||
"""Set up the ADS component."""
|
||||
import pyads
|
||||
|
||||
conf = config[DOMAIN]
|
||||
|
||||
net_id = conf.get(CONF_DEVICE)
|
||||
@@ -91,7 +110,10 @@ def setup(hass, config):
|
||||
except pyads.ADSError:
|
||||
_LOGGER.error(
|
||||
"Could not connect to ADS host (netid=%s, ip=%s, port=%s)",
|
||||
net_id, ip_address, port)
|
||||
net_id,
|
||||
ip_address,
|
||||
port,
|
||||
)
|
||||
return False
|
||||
|
||||
hass.data[DATA_ADS] = ads
|
||||
@@ -109,15 +131,18 @@ def setup(hass, config):
|
||||
_LOGGER.error(err)
|
||||
|
||||
hass.services.register(
|
||||
DOMAIN, SERVICE_WRITE_DATA_BY_NAME, handle_write_data_by_name,
|
||||
schema=SCHEMA_SERVICE_WRITE_DATA_BY_NAME)
|
||||
DOMAIN,
|
||||
SERVICE_WRITE_DATA_BY_NAME,
|
||||
handle_write_data_by_name,
|
||||
schema=SCHEMA_SERVICE_WRITE_DATA_BY_NAME,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
# Tuple to hold data needed for notification
|
||||
NotificationItem = namedtuple(
|
||||
'NotificationItem', 'hnotify huser name plc_datatype callback'
|
||||
"NotificationItem", "hnotify huser name plc_datatype callback"
|
||||
)
|
||||
|
||||
|
||||
@@ -137,15 +162,17 @@ class AdsHub:
|
||||
def shutdown(self, *args, **kwargs):
|
||||
"""Shutdown ADS connection."""
|
||||
import pyads
|
||||
|
||||
_LOGGER.debug("Shutting down ADS")
|
||||
for notification_item in self._notification_items.values():
|
||||
_LOGGER.debug(
|
||||
"Deleting device notification %d, %d",
|
||||
notification_item.hnotify, notification_item.huser)
|
||||
notification_item.hnotify,
|
||||
notification_item.huser,
|
||||
)
|
||||
try:
|
||||
self._client.del_device_notification(
|
||||
notification_item.hnotify,
|
||||
notification_item.huser
|
||||
notification_item.hnotify, notification_item.huser
|
||||
)
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error(err)
|
||||
@@ -161,6 +188,7 @@ class AdsHub:
|
||||
def write_by_name(self, name, value, plc_datatype):
|
||||
"""Write a value to the device."""
|
||||
import pyads
|
||||
|
||||
with self._lock:
|
||||
try:
|
||||
return self._client.write_by_name(name, value, plc_datatype)
|
||||
@@ -170,6 +198,7 @@ class AdsHub:
|
||||
def read_by_name(self, name, plc_datatype):
|
||||
"""Read a value from the device."""
|
||||
import pyads
|
||||
|
||||
with self._lock:
|
||||
try:
|
||||
return self._client.read_by_name(name, plc_datatype)
|
||||
@@ -179,22 +208,25 @@ class AdsHub:
|
||||
def add_device_notification(self, name, plc_datatype, callback):
|
||||
"""Add a notification to the ADS devices."""
|
||||
import pyads
|
||||
|
||||
attr = pyads.NotificationAttrib(ctypes.sizeof(plc_datatype))
|
||||
|
||||
with self._lock:
|
||||
try:
|
||||
hnotify, huser = self._client.add_device_notification(
|
||||
name, attr, self._device_notification_callback)
|
||||
name, attr, self._device_notification_callback
|
||||
)
|
||||
except pyads.ADSError as err:
|
||||
_LOGGER.error("Error subscribing to %s: %s", name, err)
|
||||
else:
|
||||
hnotify = int(hnotify)
|
||||
self._notification_items[hnotify] = NotificationItem(
|
||||
hnotify, huser, name, plc_datatype, callback)
|
||||
hnotify, huser, name, plc_datatype, callback
|
||||
)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Added device notification %d for variable %s",
|
||||
hnotify, name)
|
||||
"Added device notification %d for variable %s", hnotify, name
|
||||
)
|
||||
|
||||
def _device_notification_callback(self, notification, name):
|
||||
"""Handle device notifications."""
|
||||
@@ -213,17 +245,17 @@ class AdsHub:
|
||||
|
||||
# Parse data to desired datatype
|
||||
if notification_item.plc_datatype == self.PLCTYPE_BOOL:
|
||||
value = bool(struct.unpack('<?', bytearray(data)[:1])[0])
|
||||
value = bool(struct.unpack("<?", bytearray(data)[:1])[0])
|
||||
elif notification_item.plc_datatype == self.PLCTYPE_INT:
|
||||
value = struct.unpack('<h', bytearray(data)[:2])[0]
|
||||
value = struct.unpack("<h", bytearray(data)[:2])[0]
|
||||
elif notification_item.plc_datatype == self.PLCTYPE_BYTE:
|
||||
value = struct.unpack('<B', bytearray(data)[:1])[0]
|
||||
value = struct.unpack("<B", bytearray(data)[:1])[0]
|
||||
elif notification_item.plc_datatype == self.PLCTYPE_UINT:
|
||||
value = struct.unpack('<H', bytearray(data)[:2])[0]
|
||||
value = struct.unpack("<H", bytearray(data)[:2])[0]
|
||||
elif notification_item.plc_datatype == self.PLCTYPE_DINT:
|
||||
value = struct.unpack('<i', bytearray(data)[:4])[0]
|
||||
value = struct.unpack("<i", bytearray(data)[:4])[0]
|
||||
elif notification_item.plc_datatype == self.PLCTYPE_UDINT:
|
||||
value = struct.unpack('<I', bytearray(data)[:4])[0]
|
||||
value = struct.unpack("<I", bytearray(data)[:4])[0]
|
||||
else:
|
||||
value = bytearray(data)
|
||||
_LOGGER.warning("No callback available for this datatype")
|
||||
@@ -245,11 +277,13 @@ class AdsEntity(Entity):
|
||||
self._event = None
|
||||
|
||||
async def async_initialize_device(
|
||||
self, ads_var, plctype, state_key=STATE_KEY_STATE, factor=None):
|
||||
self, ads_var, plctype, state_key=STATE_KEY_STATE, factor=None
|
||||
):
|
||||
"""Register device notification."""
|
||||
|
||||
def update(name, value):
|
||||
"""Handle device notifications."""
|
||||
_LOGGER.debug('Variable %s changed its value to %d', name, value)
|
||||
_LOGGER.debug("Variable %s changed its value to %d", name, value)
|
||||
|
||||
if factor is None:
|
||||
self._state_dict[state_key] = value
|
||||
@@ -266,14 +300,13 @@ class AdsEntity(Entity):
|
||||
self._event = asyncio.Event()
|
||||
|
||||
await self.hass.async_add_executor_job(
|
||||
self._ads_hub.add_device_notification,
|
||||
ads_var, plctype, update)
|
||||
self._ads_hub.add_device_notification, ads_var, plctype, update
|
||||
)
|
||||
try:
|
||||
with async_timeout.timeout(10):
|
||||
await self._event.wait()
|
||||
except asyncio.TimeoutError:
|
||||
_LOGGER.debug('Variable %s: Timeout during first update',
|
||||
ads_var)
|
||||
_LOGGER.debug("Variable %s: Timeout during first update", ads_var)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
||||
@@ -4,7 +4,10 @@ import logging
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
DEVICE_CLASSES_SCHEMA, PLATFORM_SCHEMA, BinarySensorDevice)
|
||||
DEVICE_CLASSES_SCHEMA,
|
||||
PLATFORM_SCHEMA,
|
||||
BinarySensorDevice,
|
||||
)
|
||||
from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
@@ -12,12 +15,14 @@ from . import CONF_ADS_VAR, DATA_ADS, AdsEntity, STATE_KEY_STATE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = 'ADS binary sensor'
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
|
||||
})
|
||||
DEFAULT_NAME = "ADS binary sensor"
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
@@ -38,12 +43,11 @@ class AdsBinarySensor(AdsEntity, BinarySensorDevice):
|
||||
def __init__(self, ads_hub, name, ads_var, device_class):
|
||||
"""Initialize ADS binary sensor."""
|
||||
super().__init__(ads_hub, name, ads_var)
|
||||
self._device_class = device_class or 'moving'
|
||||
self._device_class = device_class or "moving"
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Register device notification."""
|
||||
await self.async_initialize_device(self._ads_var,
|
||||
self._ads_hub.PLCTYPE_BOOL)
|
||||
await self.async_initialize_device(self._ads_var, self._ads_hub.PLCTYPE_BOOL)
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
|
||||
@@ -4,35 +4,48 @@ import logging
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.cover import (
|
||||
PLATFORM_SCHEMA, SUPPORT_OPEN, SUPPORT_CLOSE, SUPPORT_STOP,
|
||||
SUPPORT_SET_POSITION, ATTR_POSITION, DEVICE_CLASSES_SCHEMA,
|
||||
CoverDevice)
|
||||
from homeassistant.const import (
|
||||
CONF_NAME, CONF_DEVICE_CLASS)
|
||||
PLATFORM_SCHEMA,
|
||||
SUPPORT_OPEN,
|
||||
SUPPORT_CLOSE,
|
||||
SUPPORT_STOP,
|
||||
SUPPORT_SET_POSITION,
|
||||
ATTR_POSITION,
|
||||
DEVICE_CLASSES_SCHEMA,
|
||||
CoverDevice,
|
||||
)
|
||||
from homeassistant.const import CONF_NAME, CONF_DEVICE_CLASS
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
from . import CONF_ADS_VAR, CONF_ADS_VAR_POSITION, DATA_ADS, \
|
||||
AdsEntity, STATE_KEY_STATE, STATE_KEY_POSITION
|
||||
from . import (
|
||||
CONF_ADS_VAR,
|
||||
CONF_ADS_VAR_POSITION,
|
||||
DATA_ADS,
|
||||
AdsEntity,
|
||||
STATE_KEY_STATE,
|
||||
STATE_KEY_POSITION,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = 'ADS Cover'
|
||||
DEFAULT_NAME = "ADS Cover"
|
||||
|
||||
CONF_ADS_VAR_SET_POS = 'adsvar_set_position'
|
||||
CONF_ADS_VAR_OPEN = 'adsvar_open'
|
||||
CONF_ADS_VAR_CLOSE = 'adsvar_close'
|
||||
CONF_ADS_VAR_STOP = 'adsvar_stop'
|
||||
CONF_ADS_VAR_SET_POS = "adsvar_set_position"
|
||||
CONF_ADS_VAR_OPEN = "adsvar_open"
|
||||
CONF_ADS_VAR_CLOSE = "adsvar_close"
|
||||
CONF_ADS_VAR_STOP = "adsvar_stop"
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Optional(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_POSITION): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_SET_POS): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_CLOSE): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_OPEN): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_STOP): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA
|
||||
})
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_POSITION): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_SET_POS): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_CLOSE): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_OPEN): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_STOP): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
@@ -48,24 +61,38 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
name = config[CONF_NAME]
|
||||
device_class = config.get(CONF_DEVICE_CLASS)
|
||||
|
||||
add_entities([AdsCover(ads_hub,
|
||||
ads_var_is_closed,
|
||||
ads_var_position,
|
||||
ads_var_pos_set,
|
||||
ads_var_open,
|
||||
ads_var_close,
|
||||
ads_var_stop,
|
||||
name,
|
||||
device_class)])
|
||||
add_entities(
|
||||
[
|
||||
AdsCover(
|
||||
ads_hub,
|
||||
ads_var_is_closed,
|
||||
ads_var_position,
|
||||
ads_var_pos_set,
|
||||
ads_var_open,
|
||||
ads_var_close,
|
||||
ads_var_stop,
|
||||
name,
|
||||
device_class,
|
||||
)
|
||||
]
|
||||
)
|
||||
|
||||
|
||||
class AdsCover(AdsEntity, CoverDevice):
|
||||
"""Representation of ADS cover."""
|
||||
|
||||
def __init__(self, ads_hub,
|
||||
ads_var_is_closed, ads_var_position,
|
||||
ads_var_pos_set, ads_var_open,
|
||||
ads_var_close, ads_var_stop, name, device_class):
|
||||
def __init__(
|
||||
self,
|
||||
ads_hub,
|
||||
ads_var_is_closed,
|
||||
ads_var_position,
|
||||
ads_var_pos_set,
|
||||
ads_var_open,
|
||||
ads_var_close,
|
||||
ads_var_stop,
|
||||
name,
|
||||
device_class,
|
||||
):
|
||||
"""Initialize AdsCover entity."""
|
||||
super().__init__(ads_hub, name, ads_var_is_closed)
|
||||
if self._ads_var is None:
|
||||
@@ -87,13 +114,14 @@ class AdsCover(AdsEntity, CoverDevice):
|
||||
async def async_added_to_hass(self):
|
||||
"""Register device notification."""
|
||||
if self._ads_var is not None:
|
||||
await self.async_initialize_device(self._ads_var,
|
||||
self._ads_hub.PLCTYPE_BOOL)
|
||||
await self.async_initialize_device(
|
||||
self._ads_var, self._ads_hub.PLCTYPE_BOOL
|
||||
)
|
||||
|
||||
if self._ads_var_position is not None:
|
||||
await self.async_initialize_device(self._ads_var_position,
|
||||
self._ads_hub.PLCTYPE_BYTE,
|
||||
STATE_KEY_POSITION)
|
||||
await self.async_initialize_device(
|
||||
self._ads_var_position, self._ads_hub.PLCTYPE_BYTE, STATE_KEY_POSITION
|
||||
)
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
@@ -130,29 +158,33 @@ class AdsCover(AdsEntity, CoverDevice):
|
||||
def stop_cover(self, **kwargs):
|
||||
"""Fire the stop action."""
|
||||
if self._ads_var_stop:
|
||||
self._ads_hub.write_by_name(self._ads_var_stop, True,
|
||||
self._ads_hub.PLCTYPE_BOOL)
|
||||
self._ads_hub.write_by_name(
|
||||
self._ads_var_stop, True, self._ads_hub.PLCTYPE_BOOL
|
||||
)
|
||||
|
||||
def set_cover_position(self, **kwargs):
|
||||
"""Set cover position."""
|
||||
position = kwargs[ATTR_POSITION]
|
||||
if self._ads_var_pos_set is not None:
|
||||
self._ads_hub.write_by_name(self._ads_var_pos_set, position,
|
||||
self._ads_hub.PLCTYPE_BYTE)
|
||||
self._ads_hub.write_by_name(
|
||||
self._ads_var_pos_set, position, self._ads_hub.PLCTYPE_BYTE
|
||||
)
|
||||
|
||||
def open_cover(self, **kwargs):
|
||||
"""Move the cover up."""
|
||||
if self._ads_var_open is not None:
|
||||
self._ads_hub.write_by_name(self._ads_var_open, True,
|
||||
self._ads_hub.PLCTYPE_BOOL)
|
||||
self._ads_hub.write_by_name(
|
||||
self._ads_var_open, True, self._ads_hub.PLCTYPE_BOOL
|
||||
)
|
||||
elif self._ads_var_pos_set is not None:
|
||||
self.set_cover_position(position=100)
|
||||
|
||||
def close_cover(self, **kwargs):
|
||||
"""Move the cover down."""
|
||||
if self._ads_var_close is not None:
|
||||
self._ads_hub.write_by_name(self._ads_var_close, True,
|
||||
self._ads_hub.PLCTYPE_BOOL)
|
||||
self._ads_hub.write_by_name(
|
||||
self._ads_var_close, True, self._ads_hub.PLCTYPE_BOOL
|
||||
)
|
||||
elif self._ads_var_pos_set is not None:
|
||||
self.set_cover_position(position=0)
|
||||
|
||||
@@ -160,6 +192,8 @@ class AdsCover(AdsEntity, CoverDevice):
|
||||
def available(self):
|
||||
"""Return False if state has not been updated yet."""
|
||||
if self._ads_var is not None or self._ads_var_position is not None:
|
||||
return self._state_dict[STATE_KEY_STATE] is not None or \
|
||||
self._state_dict[STATE_KEY_POSITION] is not None
|
||||
return (
|
||||
self._state_dict[STATE_KEY_STATE] is not None
|
||||
or self._state_dict[STATE_KEY_POSITION] is not None
|
||||
)
|
||||
return True
|
||||
|
||||
@@ -4,20 +4,32 @@ import logging
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.light import (
|
||||
ATTR_BRIGHTNESS, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS, Light)
|
||||
ATTR_BRIGHTNESS,
|
||||
PLATFORM_SCHEMA,
|
||||
SUPPORT_BRIGHTNESS,
|
||||
Light,
|
||||
)
|
||||
from homeassistant.const import CONF_NAME
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
from . import CONF_ADS_VAR, CONF_ADS_VAR_BRIGHTNESS, DATA_ADS, \
|
||||
AdsEntity, STATE_KEY_BRIGHTNESS, STATE_KEY_STATE
|
||||
from . import (
|
||||
CONF_ADS_VAR,
|
||||
CONF_ADS_VAR_BRIGHTNESS,
|
||||
DATA_ADS,
|
||||
AdsEntity,
|
||||
STATE_KEY_BRIGHTNESS,
|
||||
STATE_KEY_STATE,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
DEFAULT_NAME = 'ADS Light'
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_BRIGHTNESS): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string
|
||||
})
|
||||
DEFAULT_NAME = "ADS Light"
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_ADS_VAR_BRIGHTNESS): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
@@ -28,8 +40,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
ads_var_brightness = config.get(CONF_ADS_VAR_BRIGHTNESS)
|
||||
name = config.get(CONF_NAME)
|
||||
|
||||
add_entities([AdsLight(ads_hub, ads_var_enable, ads_var_brightness,
|
||||
name)])
|
||||
add_entities([AdsLight(ads_hub, ads_var_enable, ads_var_brightness, name)])
|
||||
|
||||
|
||||
class AdsLight(AdsEntity, Light):
|
||||
@@ -43,13 +54,14 @@ class AdsLight(AdsEntity, Light):
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Register device notification."""
|
||||
await self.async_initialize_device(self._ads_var,
|
||||
self._ads_hub.PLCTYPE_BOOL)
|
||||
await self.async_initialize_device(self._ads_var, self._ads_hub.PLCTYPE_BOOL)
|
||||
|
||||
if self._ads_var_brightness is not None:
|
||||
await self.async_initialize_device(self._ads_var_brightness,
|
||||
self._ads_hub.PLCTYPE_UINT,
|
||||
STATE_KEY_BRIGHTNESS)
|
||||
await self.async_initialize_device(
|
||||
self._ads_var_brightness,
|
||||
self._ads_hub.PLCTYPE_UINT,
|
||||
STATE_KEY_BRIGHTNESS,
|
||||
)
|
||||
|
||||
@property
|
||||
def brightness(self):
|
||||
@@ -72,14 +84,13 @@ class AdsLight(AdsEntity, Light):
|
||||
def turn_on(self, **kwargs):
|
||||
"""Turn the light on or set a specific dimmer value."""
|
||||
brightness = kwargs.get(ATTR_BRIGHTNESS)
|
||||
self._ads_hub.write_by_name(self._ads_var, True,
|
||||
self._ads_hub.PLCTYPE_BOOL)
|
||||
self._ads_hub.write_by_name(self._ads_var, True, self._ads_hub.PLCTYPE_BOOL)
|
||||
|
||||
if self._ads_var_brightness is not None and brightness is not None:
|
||||
self._ads_hub.write_by_name(self._ads_var_brightness, brightness,
|
||||
self._ads_hub.PLCTYPE_UINT)
|
||||
self._ads_hub.write_by_name(
|
||||
self._ads_var_brightness, brightness, self._ads_hub.PLCTYPE_UINT
|
||||
)
|
||||
|
||||
def turn_off(self, **kwargs):
|
||||
"""Turn the light off."""
|
||||
self._ads_hub.write_by_name(self._ads_var, False,
|
||||
self._ads_hub.PLCTYPE_BOOL)
|
||||
self._ads_hub.write_by_name(self._ads_var, False, self._ads_hub.PLCTYPE_BOOL)
|
||||
|
||||
@@ -8,21 +8,28 @@ from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
from . import CONF_ADS_FACTOR, CONF_ADS_TYPE, CONF_ADS_VAR, \
|
||||
AdsEntity, STATE_KEY_STATE
|
||||
from . import CONF_ADS_FACTOR, CONF_ADS_TYPE, CONF_ADS_VAR, AdsEntity, STATE_KEY_STATE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = "ADS sensor"
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_ADS_FACTOR): cv.positive_int,
|
||||
vol.Optional(CONF_ADS_TYPE, default=ads.ADSTYPE_INT):
|
||||
vol.In([ads.ADSTYPE_INT, ads.ADSTYPE_UINT, ads.ADSTYPE_BYTE,
|
||||
ads.ADSTYPE_DINT, ads.ADSTYPE_UDINT]),
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=''): cv.string,
|
||||
})
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_ADS_FACTOR): cv.positive_int,
|
||||
vol.Optional(CONF_ADS_TYPE, default=ads.ADSTYPE_INT): vol.In(
|
||||
[
|
||||
ads.ADSTYPE_INT,
|
||||
ads.ADSTYPE_UINT,
|
||||
ads.ADSTYPE_BYTE,
|
||||
ads.ADSTYPE_DINT,
|
||||
ads.ADSTYPE_UDINT,
|
||||
]
|
||||
),
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT, default=""): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
@@ -35,8 +42,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT)
|
||||
factor = config.get(CONF_ADS_FACTOR)
|
||||
|
||||
entity = AdsSensor(
|
||||
ads_hub, ads_var, ads_type, name, unit_of_measurement, factor)
|
||||
entity = AdsSensor(ads_hub, ads_var, ads_type, name, unit_of_measurement, factor)
|
||||
|
||||
add_entities([entity])
|
||||
|
||||
@@ -44,8 +50,7 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
class AdsSensor(AdsEntity):
|
||||
"""Representation of an ADS sensor entity."""
|
||||
|
||||
def __init__(self, ads_hub, ads_var, ads_type, name, unit_of_measurement,
|
||||
factor):
|
||||
def __init__(self, ads_hub, ads_var, ads_type, name, unit_of_measurement, factor):
|
||||
"""Initialize AdsSensor entity."""
|
||||
super().__init__(ads_hub, name, ads_var)
|
||||
self._unit_of_measurement = unit_of_measurement
|
||||
@@ -58,7 +63,8 @@ class AdsSensor(AdsEntity):
|
||||
self._ads_var,
|
||||
self._ads_hub.ADS_TYPEMAP[self._ads_type],
|
||||
STATE_KEY_STATE,
|
||||
self._factor)
|
||||
self._factor,
|
||||
)
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
|
||||
@@ -11,12 +11,11 @@ from . import CONF_ADS_VAR, DATA_ADS, AdsEntity, STATE_KEY_STATE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = 'ADS Switch'
|
||||
DEFAULT_NAME = "ADS Switch"
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_ADS_VAR): cv.string,
|
||||
vol.Optional(CONF_NAME): cv.string,
|
||||
})
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{vol.Required(CONF_ADS_VAR): cv.string, vol.Optional(CONF_NAME): cv.string}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
@@ -34,8 +33,7 @@ class AdsSwitch(AdsEntity, SwitchDevice):
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Register device notification."""
|
||||
await self.async_initialize_device(self._ads_var,
|
||||
self._ads_hub.PLCTYPE_BOOL)
|
||||
await self.async_initialize_device(self._ads_var, self._ads_hub.PLCTYPE_BOOL)
|
||||
|
||||
@property
|
||||
def is_on(self):
|
||||
@@ -44,10 +42,8 @@ class AdsSwitch(AdsEntity, SwitchDevice):
|
||||
|
||||
def turn_on(self, **kwargs):
|
||||
"""Turn the switch on."""
|
||||
self._ads_hub.write_by_name(
|
||||
self._ads_var, True, self._ads_hub.PLCTYPE_BOOL)
|
||||
self._ads_hub.write_by_name(self._ads_var, True, self._ads_hub.PLCTYPE_BOOL)
|
||||
|
||||
def turn_off(self, **kwargs):
|
||||
"""Turn the switch off."""
|
||||
self._ads_hub.write_by_name(
|
||||
self._ads_var, False, self._ads_hub.PLCTYPE_BOOL)
|
||||
self._ads_hub.write_by_name(self._ads_var, False, self._ads_hub.PLCTYPE_BOOL)
|
||||
|
||||
@@ -1,2 +1,2 @@
|
||||
"""Constants for the Aftership integration."""
|
||||
DOMAIN = 'aftership'
|
||||
DOMAIN = "aftership"
|
||||
|
||||
@@ -15,24 +15,24 @@ from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTRIBUTION = 'Information provided by AfterShip'
|
||||
ATTR_TRACKINGS = 'trackings'
|
||||
ATTRIBUTION = "Information provided by AfterShip"
|
||||
ATTR_TRACKINGS = "trackings"
|
||||
|
||||
BASE = 'https://track.aftership.com/'
|
||||
BASE = "https://track.aftership.com/"
|
||||
|
||||
CONF_SLUG = 'slug'
|
||||
CONF_TITLE = 'title'
|
||||
CONF_TRACKING_NUMBER = 'tracking_number'
|
||||
CONF_SLUG = "slug"
|
||||
CONF_TITLE = "title"
|
||||
CONF_TRACKING_NUMBER = "tracking_number"
|
||||
|
||||
DEFAULT_NAME = 'aftership'
|
||||
UPDATE_TOPIC = DOMAIN + '_update'
|
||||
DEFAULT_NAME = "aftership"
|
||||
UPDATE_TOPIC = DOMAIN + "_update"
|
||||
|
||||
ICON = 'mdi:package-variant-closed'
|
||||
ICON = "mdi:package-variant-closed"
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5)
|
||||
|
||||
SERVICE_ADD_TRACKING = 'add_tracking'
|
||||
SERVICE_REMOVE_TRACKING = 'remove_tracking'
|
||||
SERVICE_ADD_TRACKING = "add_tracking"
|
||||
SERVICE_REMOVE_TRACKING = "remove_tracking"
|
||||
|
||||
ADD_TRACKING_SERVICE_SCHEMA = vol.Schema(
|
||||
{
|
||||
@@ -43,18 +43,18 @@ ADD_TRACKING_SERVICE_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
REMOVE_TRACKING_SERVICE_SCHEMA = vol.Schema(
|
||||
{vol.Required(CONF_SLUG): cv.string,
|
||||
vol.Required(CONF_TRACKING_NUMBER): cv.string}
|
||||
{vol.Required(CONF_SLUG): cv.string, vol.Required(CONF_TRACKING_NUMBER): cv.string}
|
||||
)
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_API_KEY): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
})
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_API_KEY): cv.string,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass, config, async_add_entities, discovery_info=None):
|
||||
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
|
||||
"""Set up the AfterShip sensor platform."""
|
||||
from pyaftership.tracker import Tracking
|
||||
|
||||
@@ -66,9 +66,10 @@ async def async_setup_platform(
|
||||
|
||||
await aftership.get_trackings()
|
||||
|
||||
if not aftership.meta or aftership.meta['code'] != 200:
|
||||
_LOGGER.error("No tracking data found. Check API key is correct: %s",
|
||||
aftership.meta)
|
||||
if not aftership.meta or aftership.meta["code"] != 200:
|
||||
_LOGGER.error(
|
||||
"No tracking data found. Check API key is correct: %s", aftership.meta
|
||||
)
|
||||
return
|
||||
|
||||
instance = AfterShipSensor(aftership, name)
|
||||
@@ -130,7 +131,7 @@ class AfterShipSensor(Entity):
|
||||
@property
|
||||
def unit_of_measurement(self):
|
||||
"""Return the unit of measurement of this entity, if any."""
|
||||
return 'packages'
|
||||
return "packages"
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
@@ -145,7 +146,8 @@ class AfterShipSensor(Entity):
|
||||
async def async_added_to_hass(self):
|
||||
"""Register callbacks."""
|
||||
self.hass.helpers.dispatcher.async_dispatcher_connect(
|
||||
UPDATE_TOPIC, self.force_update)
|
||||
UPDATE_TOPIC, self.force_update
|
||||
)
|
||||
|
||||
async def force_update(self):
|
||||
"""Force update of data."""
|
||||
@@ -160,35 +162,40 @@ class AfterShipSensor(Entity):
|
||||
if not self.aftership.meta:
|
||||
_LOGGER.error("Unknown errors when querying")
|
||||
return
|
||||
if self.aftership.meta['code'] != 200:
|
||||
if self.aftership.meta["code"] != 200:
|
||||
_LOGGER.error(
|
||||
"Errors when querying AfterShip. %s", str(self.aftership.meta))
|
||||
"Errors when querying AfterShip. %s", str(self.aftership.meta)
|
||||
)
|
||||
return
|
||||
|
||||
status_to_ignore = {'delivered'}
|
||||
status_to_ignore = {"delivered"}
|
||||
status_counts = {}
|
||||
trackings = []
|
||||
not_delivered_count = 0
|
||||
|
||||
for track in self.aftership.trackings['trackings']:
|
||||
status = track['tag'].lower()
|
||||
for track in self.aftership.trackings["trackings"]:
|
||||
status = track["tag"].lower()
|
||||
name = (
|
||||
track['tracking_number']
|
||||
if track['title'] is None
|
||||
else track['title']
|
||||
track["tracking_number"] if track["title"] is None else track["title"]
|
||||
)
|
||||
last_checkpoint = (
|
||||
"Shipment pending"
|
||||
if track["tag"] == "Pending"
|
||||
else track["checkpoints"][-1]
|
||||
)
|
||||
status_counts[status] = status_counts.get(status, 0) + 1
|
||||
trackings.append({
|
||||
'name': name,
|
||||
'tracking_number': track['tracking_number'],
|
||||
'slug': track['slug'],
|
||||
'link': '%s%s/%s' %
|
||||
(BASE, track['slug'], track['tracking_number']),
|
||||
'last_update': track['updated_at'],
|
||||
'expected_delivery': track['expected_delivery'],
|
||||
'status': track['tag'],
|
||||
'last_checkpoint': track['checkpoints'][-1]
|
||||
})
|
||||
trackings.append(
|
||||
{
|
||||
"name": name,
|
||||
"tracking_number": track["tracking_number"],
|
||||
"slug": track["slug"],
|
||||
"link": "%s%s/%s" % (BASE, track["slug"], track["tracking_number"]),
|
||||
"last_update": track["updated_at"],
|
||||
"expected_delivery": track["expected_delivery"],
|
||||
"status": track["tag"],
|
||||
"last_checkpoint": last_checkpoint,
|
||||
}
|
||||
)
|
||||
|
||||
if status not in status_to_ignore:
|
||||
not_delivered_count += 1
|
||||
|
||||
@@ -4,50 +4,53 @@ import logging
|
||||
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
from homeassistant.helpers.config_validation import ( # noqa
|
||||
PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE)
|
||||
PLATFORM_SCHEMA,
|
||||
PLATFORM_SCHEMA_BASE,
|
||||
)
|
||||
from homeassistant.helpers.entity import Entity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_AQI = 'air_quality_index'
|
||||
ATTR_ATTRIBUTION = 'attribution'
|
||||
ATTR_CO2 = 'carbon_dioxide'
|
||||
ATTR_CO = 'carbon_monoxide'
|
||||
ATTR_N2O = 'nitrogen_oxide'
|
||||
ATTR_NO = 'nitrogen_monoxide'
|
||||
ATTR_NO2 = 'nitrogen_dioxide'
|
||||
ATTR_OZONE = 'ozone'
|
||||
ATTR_PM_0_1 = 'particulate_matter_0_1'
|
||||
ATTR_PM_10 = 'particulate_matter_10'
|
||||
ATTR_PM_2_5 = 'particulate_matter_2_5'
|
||||
ATTR_SO2 = 'sulphur_dioxide'
|
||||
ATTR_AQI = "air_quality_index"
|
||||
ATTR_ATTRIBUTION = "attribution"
|
||||
ATTR_CO2 = "carbon_dioxide"
|
||||
ATTR_CO = "carbon_monoxide"
|
||||
ATTR_N2O = "nitrogen_oxide"
|
||||
ATTR_NO = "nitrogen_monoxide"
|
||||
ATTR_NO2 = "nitrogen_dioxide"
|
||||
ATTR_OZONE = "ozone"
|
||||
ATTR_PM_0_1 = "particulate_matter_0_1"
|
||||
ATTR_PM_10 = "particulate_matter_10"
|
||||
ATTR_PM_2_5 = "particulate_matter_2_5"
|
||||
ATTR_SO2 = "sulphur_dioxide"
|
||||
|
||||
DOMAIN = 'air_quality'
|
||||
DOMAIN = "air_quality"
|
||||
|
||||
ENTITY_ID_FORMAT = DOMAIN + '.{}'
|
||||
ENTITY_ID_FORMAT = DOMAIN + ".{}"
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
PROP_TO_ATTR = {
|
||||
'air_quality_index': ATTR_AQI,
|
||||
'attribution': ATTR_ATTRIBUTION,
|
||||
'carbon_dioxide': ATTR_CO2,
|
||||
'carbon_monoxide': ATTR_CO,
|
||||
'nitrogen_oxide': ATTR_N2O,
|
||||
'nitrogen_monoxide': ATTR_NO,
|
||||
'nitrogen_dioxide': ATTR_NO2,
|
||||
'ozone': ATTR_OZONE,
|
||||
'particulate_matter_0_1': ATTR_PM_0_1,
|
||||
'particulate_matter_10': ATTR_PM_10,
|
||||
'particulate_matter_2_5': ATTR_PM_2_5,
|
||||
'sulphur_dioxide': ATTR_SO2,
|
||||
"air_quality_index": ATTR_AQI,
|
||||
"attribution": ATTR_ATTRIBUTION,
|
||||
"carbon_dioxide": ATTR_CO2,
|
||||
"carbon_monoxide": ATTR_CO,
|
||||
"nitrogen_oxide": ATTR_N2O,
|
||||
"nitrogen_monoxide": ATTR_NO,
|
||||
"nitrogen_dioxide": ATTR_NO2,
|
||||
"ozone": ATTR_OZONE,
|
||||
"particulate_matter_0_1": ATTR_PM_0_1,
|
||||
"particulate_matter_10": ATTR_PM_10,
|
||||
"particulate_matter_2_5": ATTR_PM_2_5,
|
||||
"sulphur_dioxide": ATTR_SO2,
|
||||
}
|
||||
|
||||
|
||||
async def async_setup(hass, config):
|
||||
"""Set up the air quality component."""
|
||||
component = hass.data[DOMAIN] = EntityComponent(
|
||||
_LOGGER, DOMAIN, hass, SCAN_INTERVAL)
|
||||
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
|
||||
)
|
||||
await component.async_setup(config)
|
||||
return True
|
||||
|
||||
|
||||
@@ -6,118 +6,96 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import PLATFORM_SCHEMA
|
||||
from homeassistant.const import (
|
||||
ATTR_ATTRIBUTION, ATTR_LATITUDE, ATTR_LONGITUDE, CONF_API_KEY,
|
||||
CONF_LATITUDE, CONF_LONGITUDE, CONF_MONITORED_CONDITIONS,
|
||||
CONF_SCAN_INTERVAL, CONF_STATE, CONF_SHOW_ON_MAP)
|
||||
ATTR_ATTRIBUTION,
|
||||
ATTR_LATITUDE,
|
||||
ATTR_LONGITUDE,
|
||||
CONF_API_KEY,
|
||||
CONF_LATITUDE,
|
||||
CONF_LONGITUDE,
|
||||
CONF_MONITORED_CONDITIONS,
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_STATE,
|
||||
CONF_SHOW_ON_MAP,
|
||||
)
|
||||
from homeassistant.helpers import aiohttp_client, config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import Throttle
|
||||
|
||||
_LOGGER = getLogger(__name__)
|
||||
|
||||
ATTR_CITY = 'city'
|
||||
ATTR_COUNTRY = 'country'
|
||||
ATTR_POLLUTANT_SYMBOL = 'pollutant_symbol'
|
||||
ATTR_POLLUTANT_UNIT = 'pollutant_unit'
|
||||
ATTR_REGION = 'region'
|
||||
ATTR_CITY = "city"
|
||||
ATTR_COUNTRY = "country"
|
||||
ATTR_POLLUTANT_SYMBOL = "pollutant_symbol"
|
||||
ATTR_POLLUTANT_UNIT = "pollutant_unit"
|
||||
ATTR_REGION = "region"
|
||||
|
||||
CONF_CITY = 'city'
|
||||
CONF_COUNTRY = 'country'
|
||||
CONF_CITY = "city"
|
||||
CONF_COUNTRY = "country"
|
||||
|
||||
DEFAULT_ATTRIBUTION = "Data provided by AirVisual"
|
||||
DEFAULT_SCAN_INTERVAL = timedelta(minutes=10)
|
||||
|
||||
MASS_PARTS_PER_MILLION = 'ppm'
|
||||
MASS_PARTS_PER_BILLION = 'ppb'
|
||||
VOLUME_MICROGRAMS_PER_CUBIC_METER = 'µg/m3'
|
||||
MASS_PARTS_PER_MILLION = "ppm"
|
||||
MASS_PARTS_PER_BILLION = "ppb"
|
||||
VOLUME_MICROGRAMS_PER_CUBIC_METER = "µg/m3"
|
||||
|
||||
SENSOR_TYPE_LEVEL = 'air_pollution_level'
|
||||
SENSOR_TYPE_AQI = 'air_quality_index'
|
||||
SENSOR_TYPE_POLLUTANT = 'main_pollutant'
|
||||
SENSOR_TYPE_LEVEL = "air_pollution_level"
|
||||
SENSOR_TYPE_AQI = "air_quality_index"
|
||||
SENSOR_TYPE_POLLUTANT = "main_pollutant"
|
||||
SENSORS = [
|
||||
(SENSOR_TYPE_LEVEL, 'Air Pollution Level', 'mdi:gauge', None),
|
||||
(SENSOR_TYPE_AQI, 'Air Quality Index', 'mdi:chart-line', 'AQI'),
|
||||
(SENSOR_TYPE_POLLUTANT, 'Main Pollutant', 'mdi:chemical-weapon', None),
|
||||
(SENSOR_TYPE_LEVEL, "Air Pollution Level", "mdi:gauge", None),
|
||||
(SENSOR_TYPE_AQI, "Air Quality Index", "mdi:chart-line", "AQI"),
|
||||
(SENSOR_TYPE_POLLUTANT, "Main Pollutant", "mdi:chemical-weapon", None),
|
||||
]
|
||||
|
||||
POLLUTANT_LEVEL_MAPPING = [{
|
||||
'label': 'Good',
|
||||
'icon': 'mdi:emoticon-excited',
|
||||
'minimum': 0,
|
||||
'maximum': 50
|
||||
}, {
|
||||
'label': 'Moderate',
|
||||
'icon': 'mdi:emoticon-happy',
|
||||
'minimum': 51,
|
||||
'maximum': 100
|
||||
}, {
|
||||
'label': 'Unhealthy for sensitive groups',
|
||||
'icon': 'mdi:emoticon-neutral',
|
||||
'minimum': 101,
|
||||
'maximum': 150
|
||||
}, {
|
||||
'label': 'Unhealthy',
|
||||
'icon': 'mdi:emoticon-sad',
|
||||
'minimum': 151,
|
||||
'maximum': 200
|
||||
}, {
|
||||
'label': 'Very Unhealthy',
|
||||
'icon': 'mdi:emoticon-dead',
|
||||
'minimum': 201,
|
||||
'maximum': 300
|
||||
}, {
|
||||
'label': 'Hazardous',
|
||||
'icon': 'mdi:biohazard',
|
||||
'minimum': 301,
|
||||
'maximum': 10000
|
||||
}]
|
||||
POLLUTANT_LEVEL_MAPPING = [
|
||||
{"label": "Good", "icon": "mdi:emoticon-excited", "minimum": 0, "maximum": 50},
|
||||
{"label": "Moderate", "icon": "mdi:emoticon-happy", "minimum": 51, "maximum": 100},
|
||||
{
|
||||
"label": "Unhealthy for sensitive groups",
|
||||
"icon": "mdi:emoticon-neutral",
|
||||
"minimum": 101,
|
||||
"maximum": 150,
|
||||
},
|
||||
{"label": "Unhealthy", "icon": "mdi:emoticon-sad", "minimum": 151, "maximum": 200},
|
||||
{
|
||||
"label": "Very Unhealthy",
|
||||
"icon": "mdi:emoticon-dead",
|
||||
"minimum": 201,
|
||||
"maximum": 300,
|
||||
},
|
||||
{"label": "Hazardous", "icon": "mdi:biohazard", "minimum": 301, "maximum": 10000},
|
||||
]
|
||||
|
||||
POLLUTANT_MAPPING = {
|
||||
'co': {
|
||||
'label': 'Carbon Monoxide',
|
||||
'unit': MASS_PARTS_PER_MILLION
|
||||
},
|
||||
'n2': {
|
||||
'label': 'Nitrogen Dioxide',
|
||||
'unit': MASS_PARTS_PER_BILLION
|
||||
},
|
||||
'o3': {
|
||||
'label': 'Ozone',
|
||||
'unit': MASS_PARTS_PER_BILLION
|
||||
},
|
||||
'p1': {
|
||||
'label': 'PM10',
|
||||
'unit': VOLUME_MICROGRAMS_PER_CUBIC_METER
|
||||
},
|
||||
'p2': {
|
||||
'label': 'PM2.5',
|
||||
'unit': VOLUME_MICROGRAMS_PER_CUBIC_METER
|
||||
},
|
||||
's2': {
|
||||
'label': 'Sulfur Dioxide',
|
||||
'unit': MASS_PARTS_PER_BILLION
|
||||
},
|
||||
"co": {"label": "Carbon Monoxide", "unit": MASS_PARTS_PER_MILLION},
|
||||
"n2": {"label": "Nitrogen Dioxide", "unit": MASS_PARTS_PER_BILLION},
|
||||
"o3": {"label": "Ozone", "unit": MASS_PARTS_PER_BILLION},
|
||||
"p1": {"label": "PM10", "unit": VOLUME_MICROGRAMS_PER_CUBIC_METER},
|
||||
"p2": {"label": "PM2.5", "unit": VOLUME_MICROGRAMS_PER_CUBIC_METER},
|
||||
"s2": {"label": "Sulfur Dioxide", "unit": MASS_PARTS_PER_BILLION},
|
||||
}
|
||||
|
||||
SENSOR_LOCALES = {'cn': 'Chinese', 'us': 'U.S.'}
|
||||
SENSOR_LOCALES = {"cn": "Chinese", "us": "U.S."}
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_API_KEY): cv.string,
|
||||
vol.Required(CONF_MONITORED_CONDITIONS, default=list(SENSOR_LOCALES)):
|
||||
vol.All(cv.ensure_list, [vol.In(SENSOR_LOCALES)]),
|
||||
vol.Inclusive(CONF_CITY, 'city'): cv.string,
|
||||
vol.Inclusive(CONF_COUNTRY, 'city'): cv.string,
|
||||
vol.Inclusive(CONF_LATITUDE, 'coords'): cv.latitude,
|
||||
vol.Inclusive(CONF_LONGITUDE, 'coords'): cv.longitude,
|
||||
vol.Optional(CONF_SHOW_ON_MAP, default=True): cv.boolean,
|
||||
vol.Inclusive(CONF_STATE, 'city'): cv.string,
|
||||
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL):
|
||||
cv.time_period
|
||||
})
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_API_KEY): cv.string,
|
||||
vol.Required(CONF_MONITORED_CONDITIONS, default=list(SENSOR_LOCALES)): vol.All(
|
||||
cv.ensure_list, [vol.In(SENSOR_LOCALES)]
|
||||
),
|
||||
vol.Inclusive(CONF_CITY, "city"): cv.string,
|
||||
vol.Inclusive(CONF_COUNTRY, "city"): cv.string,
|
||||
vol.Inclusive(CONF_LATITUDE, "coords"): cv.latitude,
|
||||
vol.Inclusive(CONF_LONGITUDE, "coords"): cv.longitude,
|
||||
vol.Optional(CONF_SHOW_ON_MAP, default=True): cv.boolean,
|
||||
vol.Inclusive(CONF_STATE, "city"): cv.string,
|
||||
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_SCAN_INTERVAL): cv.time_period,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass, config, async_add_entities, discovery_info=None):
|
||||
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
|
||||
"""Configure the platform and add the sensors."""
|
||||
from pyairvisual import Client
|
||||
|
||||
@@ -132,25 +110,27 @@ async def async_setup_platform(
|
||||
|
||||
if city and state and country:
|
||||
_LOGGER.debug(
|
||||
"Using city, state, and country: %s, %s, %s", city, state, country)
|
||||
location_id = ','.join((city, state, country))
|
||||
"Using city, state, and country: %s, %s, %s", city, state, country
|
||||
)
|
||||
location_id = ",".join((city, state, country))
|
||||
data = AirVisualData(
|
||||
Client(websession, api_key=config[CONF_API_KEY]),
|
||||
city=city,
|
||||
state=state,
|
||||
country=country,
|
||||
show_on_map=config[CONF_SHOW_ON_MAP],
|
||||
scan_interval=config[CONF_SCAN_INTERVAL])
|
||||
scan_interval=config[CONF_SCAN_INTERVAL],
|
||||
)
|
||||
else:
|
||||
_LOGGER.debug(
|
||||
"Using latitude and longitude: %s, %s", latitude, longitude)
|
||||
location_id = ','.join((str(latitude), str(longitude)))
|
||||
_LOGGER.debug("Using latitude and longitude: %s, %s", latitude, longitude)
|
||||
location_id = ",".join((str(latitude), str(longitude)))
|
||||
data = AirVisualData(
|
||||
Client(websession, api_key=config[CONF_API_KEY]),
|
||||
latitude=latitude,
|
||||
longitude=longitude,
|
||||
show_on_map=config[CONF_SHOW_ON_MAP],
|
||||
scan_interval=config[CONF_SCAN_INTERVAL])
|
||||
scan_interval=config[CONF_SCAN_INTERVAL],
|
||||
)
|
||||
|
||||
await data.async_update()
|
||||
|
||||
@@ -158,8 +138,8 @@ async def async_setup_platform(
|
||||
for locale in config[CONF_MONITORED_CONDITIONS]:
|
||||
for kind, name, icon, unit in SENSORS:
|
||||
sensors.append(
|
||||
AirVisualSensor(
|
||||
data, kind, name, icon, unit, locale, location_id))
|
||||
AirVisualSensor(data, kind, name, icon, unit, locale, location_id)
|
||||
)
|
||||
|
||||
async_add_entities(sensors, True)
|
||||
|
||||
@@ -186,8 +166,8 @@ class AirVisualSensor(Entity):
|
||||
self._attrs[ATTR_LATITUDE] = self.airvisual.latitude
|
||||
self._attrs[ATTR_LONGITUDE] = self.airvisual.longitude
|
||||
else:
|
||||
self._attrs['lati'] = self.airvisual.latitude
|
||||
self._attrs['long'] = self.airvisual.longitude
|
||||
self._attrs["lati"] = self.airvisual.latitude
|
||||
self._attrs["long"] = self.airvisual.longitude
|
||||
|
||||
return self._attrs
|
||||
|
||||
@@ -204,7 +184,7 @@ class AirVisualSensor(Entity):
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name."""
|
||||
return '{0} {1}'.format(SENSOR_LOCALES[self._locale], self._name)
|
||||
return "{0} {1}".format(SENSOR_LOCALES[self._locale], self._name)
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
@@ -214,8 +194,7 @@ class AirVisualSensor(Entity):
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return a unique, HASS-friendly identifier for this entity."""
|
||||
return '{0}_{1}_{2}'.format(
|
||||
self._location_id, self._locale, self._type)
|
||||
return "{0}_{1}_{2}".format(self._location_id, self._locale, self._type)
|
||||
|
||||
@property
|
||||
def unit_of_measurement(self):
|
||||
@@ -231,22 +210,25 @@ class AirVisualSensor(Entity):
|
||||
return
|
||||
|
||||
if self._type == SENSOR_TYPE_LEVEL:
|
||||
aqi = data['aqi{0}'.format(self._locale)]
|
||||
aqi = data["aqi{0}".format(self._locale)]
|
||||
[level] = [
|
||||
i for i in POLLUTANT_LEVEL_MAPPING
|
||||
if i['minimum'] <= aqi <= i['maximum']
|
||||
i
|
||||
for i in POLLUTANT_LEVEL_MAPPING
|
||||
if i["minimum"] <= aqi <= i["maximum"]
|
||||
]
|
||||
self._state = level['label']
|
||||
self._icon = level['icon']
|
||||
self._state = level["label"]
|
||||
self._icon = level["icon"]
|
||||
elif self._type == SENSOR_TYPE_AQI:
|
||||
self._state = data['aqi{0}'.format(self._locale)]
|
||||
self._state = data["aqi{0}".format(self._locale)]
|
||||
elif self._type == SENSOR_TYPE_POLLUTANT:
|
||||
symbol = data['main{0}'.format(self._locale)]
|
||||
self._state = POLLUTANT_MAPPING[symbol]['label']
|
||||
self._attrs.update({
|
||||
ATTR_POLLUTANT_SYMBOL: symbol,
|
||||
ATTR_POLLUTANT_UNIT: POLLUTANT_MAPPING[symbol]['unit']
|
||||
})
|
||||
symbol = data["main{0}".format(self._locale)]
|
||||
self._state = POLLUTANT_MAPPING[symbol]["label"]
|
||||
self._attrs.update(
|
||||
{
|
||||
ATTR_POLLUTANT_SYMBOL: symbol,
|
||||
ATTR_POLLUTANT_UNIT: POLLUTANT_MAPPING[symbol]["unit"],
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class AirVisualData:
|
||||
@@ -263,8 +245,7 @@ class AirVisualData:
|
||||
self.show_on_map = kwargs.get(CONF_SHOW_ON_MAP)
|
||||
self.state = kwargs.get(CONF_STATE)
|
||||
|
||||
self.async_update = Throttle(
|
||||
kwargs[CONF_SCAN_INTERVAL])(self._async_update)
|
||||
self.async_update = Throttle(kwargs[CONF_SCAN_INTERVAL])(self._async_update)
|
||||
|
||||
async def _async_update(self):
|
||||
"""Update AirVisual data."""
|
||||
@@ -272,23 +253,21 @@ class AirVisualData:
|
||||
|
||||
try:
|
||||
if self.city and self.state and self.country:
|
||||
resp = await self._client.api.city(
|
||||
self.city, self.state, self.country)
|
||||
self.longitude, self.latitude = resp['location']['coordinates']
|
||||
resp = await self._client.api.city(self.city, self.state, self.country)
|
||||
self.longitude, self.latitude = resp["location"]["coordinates"]
|
||||
else:
|
||||
resp = await self._client.api.nearest_city(
|
||||
self.latitude, self.longitude)
|
||||
self.latitude, self.longitude
|
||||
)
|
||||
|
||||
_LOGGER.debug("New data retrieved: %s", resp)
|
||||
|
||||
self.pollution_info = resp['current']['pollution']
|
||||
self.pollution_info = resp["current"]["pollution"]
|
||||
except (KeyError, AirVisualError) as err:
|
||||
if self.city and self.state and self.country:
|
||||
location = (self.city, self.state, self.country)
|
||||
else:
|
||||
location = (self.latitude, self.longitude)
|
||||
|
||||
_LOGGER.error(
|
||||
"Can't retrieve data for location: %s (%s)", location,
|
||||
err)
|
||||
_LOGGER.error("Can't retrieve data for location: %s (%s)", location, err)
|
||||
self.pollution_info = {}
|
||||
|
||||
@@ -3,30 +3,39 @@ import logging
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.cover import (CoverDevice, PLATFORM_SCHEMA,
|
||||
SUPPORT_OPEN, SUPPORT_CLOSE)
|
||||
from homeassistant.const import (CONF_USERNAME, CONF_PASSWORD, STATE_CLOSED,
|
||||
STATE_OPENING, STATE_CLOSING, STATE_OPEN)
|
||||
from homeassistant.components.cover import (
|
||||
CoverDevice,
|
||||
PLATFORM_SCHEMA,
|
||||
SUPPORT_OPEN,
|
||||
SUPPORT_CLOSE,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_USERNAME,
|
||||
CONF_PASSWORD,
|
||||
STATE_CLOSED,
|
||||
STATE_OPENING,
|
||||
STATE_CLOSING,
|
||||
STATE_OPEN,
|
||||
)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
NOTIFICATION_ID = 'aladdin_notification'
|
||||
NOTIFICATION_TITLE = 'Aladdin Connect Cover Setup'
|
||||
NOTIFICATION_ID = "aladdin_notification"
|
||||
NOTIFICATION_TITLE = "Aladdin Connect Cover Setup"
|
||||
|
||||
STATES_MAP = {
|
||||
'open': STATE_OPEN,
|
||||
'opening': STATE_OPENING,
|
||||
'closed': STATE_CLOSED,
|
||||
'closing': STATE_CLOSING
|
||||
"open": STATE_OPEN,
|
||||
"opening": STATE_OPENING,
|
||||
"closed": STATE_CLOSED,
|
||||
"closing": STATE_CLOSING,
|
||||
}
|
||||
|
||||
SUPPORTED_FEATURES = SUPPORT_OPEN | SUPPORT_CLOSE
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Required(CONF_PASSWORD): cv.string
|
||||
})
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
@@ -44,11 +53,12 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
except (TypeError, KeyError, NameError, ValueError) as ex:
|
||||
_LOGGER.error("%s", ex)
|
||||
hass.components.persistent_notification.create(
|
||||
'Error: {}<br />'
|
||||
'You will need to restart hass after fixing.'
|
||||
''.format(ex),
|
||||
"Error: {}<br />"
|
||||
"You will need to restart hass after fixing."
|
||||
"".format(ex),
|
||||
title=NOTIFICATION_TITLE,
|
||||
notification_id=NOTIFICATION_ID)
|
||||
notification_id=NOTIFICATION_ID,
|
||||
)
|
||||
|
||||
|
||||
class AladdinDevice(CoverDevice):
|
||||
@@ -57,15 +67,15 @@ class AladdinDevice(CoverDevice):
|
||||
def __init__(self, acc, device):
|
||||
"""Initialize the cover."""
|
||||
self._acc = acc
|
||||
self._device_id = device['device_id']
|
||||
self._number = device['door_number']
|
||||
self._name = device['name']
|
||||
self._status = STATES_MAP.get(device['status'])
|
||||
self._device_id = device["device_id"]
|
||||
self._number = device["door_number"]
|
||||
self._name = device["name"]
|
||||
self._status = STATES_MAP.get(device["status"])
|
||||
|
||||
@property
|
||||
def device_class(self):
|
||||
"""Define this cover as a garage door."""
|
||||
return 'garage'
|
||||
return "garage"
|
||||
|
||||
@property
|
||||
def supported_features(self):
|
||||
@@ -75,7 +85,7 @@ class AladdinDevice(CoverDevice):
|
||||
@property
|
||||
def unique_id(self):
|
||||
"""Return a unique ID."""
|
||||
return '{}-{}'.format(self._device_id, self._number)
|
||||
return "{}-{}".format(self._device_id, self._number)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
|
||||
@@ -5,59 +5,65 @@ import logging
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import (
|
||||
ATTR_CODE, ATTR_CODE_FORMAT, ATTR_ENTITY_ID, SERVICE_ALARM_TRIGGER,
|
||||
SERVICE_ALARM_DISARM, SERVICE_ALARM_ARM_HOME, SERVICE_ALARM_ARM_AWAY,
|
||||
SERVICE_ALARM_ARM_NIGHT, SERVICE_ALARM_ARM_CUSTOM_BYPASS)
|
||||
ATTR_CODE,
|
||||
ATTR_CODE_FORMAT,
|
||||
SERVICE_ALARM_TRIGGER,
|
||||
SERVICE_ALARM_DISARM,
|
||||
SERVICE_ALARM_ARM_HOME,
|
||||
SERVICE_ALARM_ARM_AWAY,
|
||||
SERVICE_ALARM_ARM_NIGHT,
|
||||
SERVICE_ALARM_ARM_CUSTOM_BYPASS,
|
||||
)
|
||||
from homeassistant.helpers.config_validation import ( # noqa
|
||||
PLATFORM_SCHEMA, PLATFORM_SCHEMA_BASE)
|
||||
ENTITY_SERVICE_SCHEMA,
|
||||
PLATFORM_SCHEMA,
|
||||
PLATFORM_SCHEMA_BASE,
|
||||
)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.helpers.entity_component import EntityComponent
|
||||
|
||||
DOMAIN = 'alarm_control_panel'
|
||||
DOMAIN = "alarm_control_panel"
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
ATTR_CHANGED_BY = 'changed_by'
|
||||
FORMAT_TEXT = 'text'
|
||||
FORMAT_NUMBER = 'number'
|
||||
ATTR_CHANGED_BY = "changed_by"
|
||||
FORMAT_TEXT = "text"
|
||||
FORMAT_NUMBER = "number"
|
||||
ATTR_CODE_ARM_REQUIRED = "code_arm_required"
|
||||
|
||||
ENTITY_ID_FORMAT = DOMAIN + '.{}'
|
||||
ENTITY_ID_FORMAT = DOMAIN + ".{}"
|
||||
|
||||
ALARM_SERVICE_SCHEMA = vol.Schema({
|
||||
vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids,
|
||||
vol.Optional(ATTR_CODE): cv.string,
|
||||
})
|
||||
ALARM_SERVICE_SCHEMA = ENTITY_SERVICE_SCHEMA.extend(
|
||||
{vol.Optional(ATTR_CODE): cv.string}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass, config):
|
||||
"""Track states and offer events for sensors."""
|
||||
component = hass.data[DOMAIN] = EntityComponent(
|
||||
logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL)
|
||||
logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL
|
||||
)
|
||||
|
||||
await component.async_setup(config)
|
||||
|
||||
component.async_register_entity_service(
|
||||
SERVICE_ALARM_DISARM, ALARM_SERVICE_SCHEMA,
|
||||
'async_alarm_disarm'
|
||||
SERVICE_ALARM_DISARM, ALARM_SERVICE_SCHEMA, "async_alarm_disarm"
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_ALARM_ARM_HOME, ALARM_SERVICE_SCHEMA,
|
||||
'async_alarm_arm_home'
|
||||
SERVICE_ALARM_ARM_HOME, ALARM_SERVICE_SCHEMA, "async_alarm_arm_home"
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_ALARM_ARM_AWAY, ALARM_SERVICE_SCHEMA,
|
||||
'async_alarm_arm_away'
|
||||
SERVICE_ALARM_ARM_AWAY, ALARM_SERVICE_SCHEMA, "async_alarm_arm_away"
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_ALARM_ARM_NIGHT, ALARM_SERVICE_SCHEMA,
|
||||
'async_alarm_arm_night'
|
||||
SERVICE_ALARM_ARM_NIGHT, ALARM_SERVICE_SCHEMA, "async_alarm_arm_night"
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_ALARM_ARM_CUSTOM_BYPASS, ALARM_SERVICE_SCHEMA,
|
||||
'async_alarm_arm_custom_bypass'
|
||||
SERVICE_ALARM_ARM_CUSTOM_BYPASS,
|
||||
ALARM_SERVICE_SCHEMA,
|
||||
"async_alarm_arm_custom_bypass",
|
||||
)
|
||||
component.async_register_entity_service(
|
||||
SERVICE_ALARM_TRIGGER, ALARM_SERVICE_SCHEMA,
|
||||
'async_alarm_trigger'
|
||||
SERVICE_ALARM_TRIGGER, ALARM_SERVICE_SCHEMA, "async_alarm_trigger"
|
||||
)
|
||||
|
||||
return True
|
||||
@@ -87,6 +93,11 @@ class AlarmControlPanel(Entity):
|
||||
"""Last change triggered by."""
|
||||
return None
|
||||
|
||||
@property
|
||||
def code_arm_required(self):
|
||||
"""Whether the code is required for arm actions."""
|
||||
return True
|
||||
|
||||
def alarm_disarm(self, code=None):
|
||||
"""Send disarm command."""
|
||||
raise NotImplementedError()
|
||||
@@ -151,14 +162,14 @@ class AlarmControlPanel(Entity):
|
||||
|
||||
This method must be run in the event loop and returns a coroutine.
|
||||
"""
|
||||
return self.hass.async_add_executor_job(
|
||||
self.alarm_arm_custom_bypass, code)
|
||||
return self.hass.async_add_executor_job(self.alarm_arm_custom_bypass, code)
|
||||
|
||||
@property
|
||||
def state_attributes(self):
|
||||
"""Return the state attributes."""
|
||||
state_attr = {
|
||||
ATTR_CODE_FORMAT: self.code_format,
|
||||
ATTR_CHANGED_BY: self.changed_by
|
||||
ATTR_CHANGED_BY: self.changed_by,
|
||||
ATTR_CODE_ARM_REQUIRED: self.code_arm_required,
|
||||
}
|
||||
return state_attr
|
||||
|
||||
@@ -12,85 +12,105 @@ from homeassistant.components.binary_sensor import DEVICE_CLASSES_SCHEMA
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DOMAIN = 'alarmdecoder'
|
||||
DOMAIN = "alarmdecoder"
|
||||
|
||||
DATA_AD = 'alarmdecoder'
|
||||
DATA_AD = "alarmdecoder"
|
||||
|
||||
CONF_DEVICE = 'device'
|
||||
CONF_DEVICE_BAUD = 'baudrate'
|
||||
CONF_DEVICE_PATH = 'path'
|
||||
CONF_DEVICE_PORT = 'port'
|
||||
CONF_DEVICE_TYPE = 'type'
|
||||
CONF_PANEL_DISPLAY = 'panel_display'
|
||||
CONF_ZONE_NAME = 'name'
|
||||
CONF_ZONE_TYPE = 'type'
|
||||
CONF_ZONE_LOOP = 'loop'
|
||||
CONF_ZONE_RFID = 'rfid'
|
||||
CONF_ZONES = 'zones'
|
||||
CONF_RELAY_ADDR = 'relayaddr'
|
||||
CONF_RELAY_CHAN = 'relaychan'
|
||||
CONF_DEVICE = "device"
|
||||
CONF_DEVICE_BAUD = "baudrate"
|
||||
CONF_DEVICE_PATH = "path"
|
||||
CONF_DEVICE_PORT = "port"
|
||||
CONF_DEVICE_TYPE = "type"
|
||||
CONF_PANEL_DISPLAY = "panel_display"
|
||||
CONF_ZONE_NAME = "name"
|
||||
CONF_ZONE_TYPE = "type"
|
||||
CONF_ZONE_LOOP = "loop"
|
||||
CONF_ZONE_RFID = "rfid"
|
||||
CONF_ZONES = "zones"
|
||||
CONF_RELAY_ADDR = "relayaddr"
|
||||
CONF_RELAY_CHAN = "relaychan"
|
||||
|
||||
DEFAULT_DEVICE_TYPE = 'socket'
|
||||
DEFAULT_DEVICE_HOST = 'localhost'
|
||||
DEFAULT_DEVICE_TYPE = "socket"
|
||||
DEFAULT_DEVICE_HOST = "localhost"
|
||||
DEFAULT_DEVICE_PORT = 10000
|
||||
DEFAULT_DEVICE_PATH = '/dev/ttyUSB0'
|
||||
DEFAULT_DEVICE_PATH = "/dev/ttyUSB0"
|
||||
DEFAULT_DEVICE_BAUD = 115200
|
||||
|
||||
DEFAULT_PANEL_DISPLAY = False
|
||||
|
||||
DEFAULT_ZONE_TYPE = 'opening'
|
||||
DEFAULT_ZONE_TYPE = "opening"
|
||||
|
||||
SIGNAL_PANEL_MESSAGE = 'alarmdecoder.panel_message'
|
||||
SIGNAL_PANEL_ARM_AWAY = 'alarmdecoder.panel_arm_away'
|
||||
SIGNAL_PANEL_ARM_HOME = 'alarmdecoder.panel_arm_home'
|
||||
SIGNAL_PANEL_DISARM = 'alarmdecoder.panel_disarm'
|
||||
SIGNAL_PANEL_MESSAGE = "alarmdecoder.panel_message"
|
||||
SIGNAL_PANEL_ARM_AWAY = "alarmdecoder.panel_arm_away"
|
||||
SIGNAL_PANEL_ARM_HOME = "alarmdecoder.panel_arm_home"
|
||||
SIGNAL_PANEL_DISARM = "alarmdecoder.panel_disarm"
|
||||
|
||||
SIGNAL_ZONE_FAULT = 'alarmdecoder.zone_fault'
|
||||
SIGNAL_ZONE_RESTORE = 'alarmdecoder.zone_restore'
|
||||
SIGNAL_RFX_MESSAGE = 'alarmdecoder.rfx_message'
|
||||
SIGNAL_REL_MESSAGE = 'alarmdecoder.rel_message'
|
||||
SIGNAL_ZONE_FAULT = "alarmdecoder.zone_fault"
|
||||
SIGNAL_ZONE_RESTORE = "alarmdecoder.zone_restore"
|
||||
SIGNAL_RFX_MESSAGE = "alarmdecoder.rfx_message"
|
||||
SIGNAL_REL_MESSAGE = "alarmdecoder.rel_message"
|
||||
|
||||
DEVICE_SOCKET_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_DEVICE_TYPE): 'socket',
|
||||
vol.Optional(CONF_HOST, default=DEFAULT_DEVICE_HOST): cv.string,
|
||||
vol.Optional(CONF_DEVICE_PORT, default=DEFAULT_DEVICE_PORT): cv.port})
|
||||
DEVICE_SOCKET_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_DEVICE_TYPE): "socket",
|
||||
vol.Optional(CONF_HOST, default=DEFAULT_DEVICE_HOST): cv.string,
|
||||
vol.Optional(CONF_DEVICE_PORT, default=DEFAULT_DEVICE_PORT): cv.port,
|
||||
}
|
||||
)
|
||||
|
||||
DEVICE_SERIAL_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_DEVICE_TYPE): 'serial',
|
||||
vol.Optional(CONF_DEVICE_PATH, default=DEFAULT_DEVICE_PATH): cv.string,
|
||||
vol.Optional(CONF_DEVICE_BAUD, default=DEFAULT_DEVICE_BAUD): cv.string})
|
||||
DEVICE_SERIAL_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_DEVICE_TYPE): "serial",
|
||||
vol.Optional(CONF_DEVICE_PATH, default=DEFAULT_DEVICE_PATH): cv.string,
|
||||
vol.Optional(CONF_DEVICE_BAUD, default=DEFAULT_DEVICE_BAUD): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
DEVICE_USB_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_DEVICE_TYPE): 'usb'})
|
||||
DEVICE_USB_SCHEMA = vol.Schema({vol.Required(CONF_DEVICE_TYPE): "usb"})
|
||||
|
||||
ZONE_SCHEMA = vol.Schema({
|
||||
vol.Required(CONF_ZONE_NAME): cv.string,
|
||||
vol.Optional(CONF_ZONE_TYPE,
|
||||
default=DEFAULT_ZONE_TYPE): vol.Any(DEVICE_CLASSES_SCHEMA),
|
||||
vol.Optional(CONF_ZONE_RFID): cv.string,
|
||||
vol.Optional(CONF_ZONE_LOOP):
|
||||
vol.All(vol.Coerce(int), vol.Range(min=1, max=4)),
|
||||
vol.Inclusive(CONF_RELAY_ADDR, 'relaylocation',
|
||||
'Relay address and channel must exist together'): cv.byte,
|
||||
vol.Inclusive(CONF_RELAY_CHAN, 'relaylocation',
|
||||
'Relay address and channel must exist together'): cv.byte})
|
||||
ZONE_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_ZONE_NAME): cv.string,
|
||||
vol.Optional(CONF_ZONE_TYPE, default=DEFAULT_ZONE_TYPE): vol.Any(
|
||||
DEVICE_CLASSES_SCHEMA
|
||||
),
|
||||
vol.Optional(CONF_ZONE_RFID): cv.string,
|
||||
vol.Optional(CONF_ZONE_LOOP): vol.All(vol.Coerce(int), vol.Range(min=1, max=4)),
|
||||
vol.Inclusive(
|
||||
CONF_RELAY_ADDR,
|
||||
"relaylocation",
|
||||
"Relay address and channel must exist together",
|
||||
): cv.byte,
|
||||
vol.Inclusive(
|
||||
CONF_RELAY_CHAN,
|
||||
"relaylocation",
|
||||
"Relay address and channel must exist together",
|
||||
): cv.byte,
|
||||
}
|
||||
)
|
||||
|
||||
CONFIG_SCHEMA = vol.Schema({
|
||||
DOMAIN: vol.Schema({
|
||||
vol.Required(CONF_DEVICE): vol.Any(
|
||||
DEVICE_SOCKET_SCHEMA, DEVICE_SERIAL_SCHEMA,
|
||||
DEVICE_USB_SCHEMA),
|
||||
vol.Optional(CONF_PANEL_DISPLAY,
|
||||
default=DEFAULT_PANEL_DISPLAY): cv.boolean,
|
||||
vol.Optional(CONF_ZONES): {vol.Coerce(int): ZONE_SCHEMA},
|
||||
}),
|
||||
}, extra=vol.ALLOW_EXTRA)
|
||||
CONFIG_SCHEMA = vol.Schema(
|
||||
{
|
||||
DOMAIN: vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_DEVICE): vol.Any(
|
||||
DEVICE_SOCKET_SCHEMA, DEVICE_SERIAL_SCHEMA, DEVICE_USB_SCHEMA
|
||||
),
|
||||
vol.Optional(
|
||||
CONF_PANEL_DISPLAY, default=DEFAULT_PANEL_DISPLAY
|
||||
): cv.boolean,
|
||||
vol.Optional(CONF_ZONES): {vol.Coerce(int): ZONE_SCHEMA},
|
||||
}
|
||||
)
|
||||
},
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
|
||||
def setup(hass, config):
|
||||
"""Set up for the AlarmDecoder devices."""
|
||||
from alarmdecoder import AlarmDecoder
|
||||
from alarmdecoder.devices import (SocketDevice, SerialDevice, USBDevice)
|
||||
from alarmdecoder.devices import SocketDevice, SerialDevice, USBDevice
|
||||
|
||||
conf = config.get(DOMAIN)
|
||||
|
||||
@@ -115,13 +135,15 @@ def setup(hass, config):
|
||||
def open_connection(now=None):
|
||||
"""Open a connection to AlarmDecoder."""
|
||||
from alarmdecoder.util import NoDeviceError
|
||||
|
||||
nonlocal restart
|
||||
try:
|
||||
controller.open(baud)
|
||||
except NoDeviceError:
|
||||
_LOGGER.debug("Failed to connect. Retrying in 5 seconds")
|
||||
hass.helpers.event.track_point_in_time(
|
||||
open_connection, dt_util.utcnow() + timedelta(seconds=5))
|
||||
open_connection, dt_util.utcnow() + timedelta(seconds=5)
|
||||
)
|
||||
return
|
||||
_LOGGER.debug("Established a connection with the alarmdecoder")
|
||||
restart = True
|
||||
@@ -137,39 +159,34 @@ def setup(hass, config):
|
||||
|
||||
def handle_message(sender, message):
|
||||
"""Handle message from AlarmDecoder."""
|
||||
hass.helpers.dispatcher.dispatcher_send(
|
||||
SIGNAL_PANEL_MESSAGE, message)
|
||||
hass.helpers.dispatcher.dispatcher_send(SIGNAL_PANEL_MESSAGE, message)
|
||||
|
||||
def handle_rfx_message(sender, message):
|
||||
"""Handle RFX message from AlarmDecoder."""
|
||||
hass.helpers.dispatcher.dispatcher_send(
|
||||
SIGNAL_RFX_MESSAGE, message)
|
||||
hass.helpers.dispatcher.dispatcher_send(SIGNAL_RFX_MESSAGE, message)
|
||||
|
||||
def zone_fault_callback(sender, zone):
|
||||
"""Handle zone fault from AlarmDecoder."""
|
||||
hass.helpers.dispatcher.dispatcher_send(
|
||||
SIGNAL_ZONE_FAULT, zone)
|
||||
hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_FAULT, zone)
|
||||
|
||||
def zone_restore_callback(sender, zone):
|
||||
"""Handle zone restore from AlarmDecoder."""
|
||||
hass.helpers.dispatcher.dispatcher_send(
|
||||
SIGNAL_ZONE_RESTORE, zone)
|
||||
hass.helpers.dispatcher.dispatcher_send(SIGNAL_ZONE_RESTORE, zone)
|
||||
|
||||
def handle_rel_message(sender, message):
|
||||
"""Handle relay message from AlarmDecoder."""
|
||||
hass.helpers.dispatcher.dispatcher_send(
|
||||
SIGNAL_REL_MESSAGE, message)
|
||||
hass.helpers.dispatcher.dispatcher_send(SIGNAL_REL_MESSAGE, message)
|
||||
|
||||
controller = False
|
||||
if device_type == 'socket':
|
||||
if device_type == "socket":
|
||||
host = device.get(CONF_HOST)
|
||||
port = device.get(CONF_DEVICE_PORT)
|
||||
controller = AlarmDecoder(SocketDevice(interface=(host, port)))
|
||||
elif device_type == 'serial':
|
||||
elif device_type == "serial":
|
||||
path = device.get(CONF_DEVICE_PATH)
|
||||
baud = device.get(CONF_DEVICE_BAUD)
|
||||
controller = AlarmDecoder(SerialDevice(interface=path))
|
||||
elif device_type == 'usb':
|
||||
elif device_type == "usb":
|
||||
AlarmDecoder(USBDevice.find())
|
||||
return False
|
||||
|
||||
@@ -186,13 +203,12 @@ def setup(hass, config):
|
||||
|
||||
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_alarmdecoder)
|
||||
|
||||
load_platform(hass, 'alarm_control_panel', DOMAIN, conf, config)
|
||||
load_platform(hass, "alarm_control_panel", DOMAIN, conf, config)
|
||||
|
||||
if zones:
|
||||
load_platform(
|
||||
hass, 'binary_sensor', DOMAIN, {CONF_ZONES: zones}, config)
|
||||
load_platform(hass, "binary_sensor", DOMAIN, {CONF_ZONES: zones}, config)
|
||||
|
||||
if display:
|
||||
load_platform(hass, 'sensor', DOMAIN, conf, config)
|
||||
load_platform(hass, "sensor", DOMAIN, conf, config)
|
||||
|
||||
return True
|
||||
|
||||
@@ -5,18 +5,20 @@ import voluptuous as vol
|
||||
|
||||
import homeassistant.components.alarm_control_panel as alarm
|
||||
from homeassistant.const import (
|
||||
ATTR_CODE, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_HOME,
|
||||
STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED)
|
||||
ATTR_CODE,
|
||||
STATE_ALARM_ARMED_AWAY,
|
||||
STATE_ALARM_ARMED_HOME,
|
||||
STATE_ALARM_DISARMED,
|
||||
STATE_ALARM_TRIGGERED,
|
||||
)
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
from . import DATA_AD, SIGNAL_PANEL_MESSAGE
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
SERVICE_ALARM_TOGGLE_CHIME = 'alarmdecoder_alarm_toggle_chime'
|
||||
ALARM_TOGGLE_CHIME_SCHEMA = vol.Schema({
|
||||
vol.Required(ATTR_CODE): cv.string,
|
||||
})
|
||||
SERVICE_ALARM_TOGGLE_CHIME = "alarmdecoder_alarm_toggle_chime"
|
||||
ALARM_TOGGLE_CHIME_SCHEMA = vol.Schema({vol.Required(ATTR_CODE): cv.string})
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
@@ -30,8 +32,11 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
device.alarm_toggle_chime(code)
|
||||
|
||||
hass.services.register(
|
||||
alarm.DOMAIN, SERVICE_ALARM_TOGGLE_CHIME, alarm_toggle_chime_handler,
|
||||
schema=ALARM_TOGGLE_CHIME_SCHEMA)
|
||||
alarm.DOMAIN,
|
||||
SERVICE_ALARM_TOGGLE_CHIME,
|
||||
alarm_toggle_chime_handler,
|
||||
schema=ALARM_TOGGLE_CHIME_SCHEMA,
|
||||
)
|
||||
|
||||
|
||||
class AlarmDecoderAlarmPanel(alarm.AlarmControlPanel):
|
||||
@@ -55,7 +60,8 @@ class AlarmDecoderAlarmPanel(alarm.AlarmControlPanel):
|
||||
async def async_added_to_hass(self):
|
||||
"""Register callbacks."""
|
||||
self.hass.helpers.dispatcher.async_dispatcher_connect(
|
||||
SIGNAL_PANEL_MESSAGE, self._message_callback)
|
||||
SIGNAL_PANEL_MESSAGE, self._message_callback
|
||||
)
|
||||
|
||||
def _message_callback(self, message):
|
||||
"""Handle received messages."""
|
||||
@@ -104,15 +110,15 @@ class AlarmDecoderAlarmPanel(alarm.AlarmControlPanel):
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes."""
|
||||
return {
|
||||
'ac_power': self._ac_power,
|
||||
'backlight_on': self._backlight_on,
|
||||
'battery_low': self._battery_low,
|
||||
'check_zone': self._check_zone,
|
||||
'chime': self._chime,
|
||||
'entry_delay_off': self._entry_delay_off,
|
||||
'programming_mode': self._programming_mode,
|
||||
'ready': self._ready,
|
||||
'zone_bypassed': self._zone_bypassed,
|
||||
"ac_power": self._ac_power,
|
||||
"backlight_on": self._backlight_on,
|
||||
"battery_low": self._battery_low,
|
||||
"check_zone": self._check_zone,
|
||||
"chime": self._chime,
|
||||
"entry_delay_off": self._entry_delay_off,
|
||||
"programming_mode": self._programming_mode,
|
||||
"ready": self._ready,
|
||||
"zone_bypassed": self._zone_bypassed,
|
||||
}
|
||||
|
||||
def alarm_disarm(self, code=None):
|
||||
|
||||
@@ -4,20 +4,30 @@ import logging
|
||||
from homeassistant.components.binary_sensor import BinarySensorDevice
|
||||
|
||||
from . import (
|
||||
CONF_RELAY_ADDR, CONF_RELAY_CHAN, CONF_ZONE_LOOP, CONF_ZONE_NAME,
|
||||
CONF_ZONE_RFID, CONF_ZONE_TYPE, CONF_ZONES, SIGNAL_REL_MESSAGE,
|
||||
SIGNAL_RFX_MESSAGE, SIGNAL_ZONE_FAULT, SIGNAL_ZONE_RESTORE, ZONE_SCHEMA)
|
||||
CONF_RELAY_ADDR,
|
||||
CONF_RELAY_CHAN,
|
||||
CONF_ZONE_LOOP,
|
||||
CONF_ZONE_NAME,
|
||||
CONF_ZONE_RFID,
|
||||
CONF_ZONE_TYPE,
|
||||
CONF_ZONES,
|
||||
SIGNAL_REL_MESSAGE,
|
||||
SIGNAL_RFX_MESSAGE,
|
||||
SIGNAL_ZONE_FAULT,
|
||||
SIGNAL_ZONE_RESTORE,
|
||||
ZONE_SCHEMA,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_RF_BIT0 = 'rf_bit0'
|
||||
ATTR_RF_LOW_BAT = 'rf_low_battery'
|
||||
ATTR_RF_SUPERVISED = 'rf_supervised'
|
||||
ATTR_RF_BIT3 = 'rf_bit3'
|
||||
ATTR_RF_LOOP3 = 'rf_loop3'
|
||||
ATTR_RF_LOOP2 = 'rf_loop2'
|
||||
ATTR_RF_LOOP4 = 'rf_loop4'
|
||||
ATTR_RF_LOOP1 = 'rf_loop1'
|
||||
ATTR_RF_BIT0 = "rf_bit0"
|
||||
ATTR_RF_LOW_BAT = "rf_low_battery"
|
||||
ATTR_RF_SUPERVISED = "rf_supervised"
|
||||
ATTR_RF_BIT3 = "rf_bit3"
|
||||
ATTR_RF_LOOP3 = "rf_loop3"
|
||||
ATTR_RF_LOOP2 = "rf_loop2"
|
||||
ATTR_RF_LOOP4 = "rf_loop4"
|
||||
ATTR_RF_LOOP1 = "rf_loop1"
|
||||
|
||||
|
||||
def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
@@ -34,8 +44,8 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
relay_addr = device_config_data.get(CONF_RELAY_ADDR)
|
||||
relay_chan = device_config_data.get(CONF_RELAY_CHAN)
|
||||
device = AlarmDecoderBinarySensor(
|
||||
zone_num, zone_name, zone_type, zone_rfid, zone_loop, relay_addr,
|
||||
relay_chan)
|
||||
zone_num, zone_name, zone_type, zone_rfid, zone_loop, relay_addr, relay_chan
|
||||
)
|
||||
devices.append(device)
|
||||
|
||||
add_entities(devices)
|
||||
@@ -46,8 +56,16 @@ def setup_platform(hass, config, add_entities, discovery_info=None):
|
||||
class AlarmDecoderBinarySensor(BinarySensorDevice):
|
||||
"""Representation of an AlarmDecoder binary sensor."""
|
||||
|
||||
def __init__(self, zone_number, zone_name, zone_type, zone_rfid, zone_loop,
|
||||
relay_addr, relay_chan):
|
||||
def __init__(
|
||||
self,
|
||||
zone_number,
|
||||
zone_name,
|
||||
zone_type,
|
||||
zone_rfid,
|
||||
zone_loop,
|
||||
relay_addr,
|
||||
relay_chan,
|
||||
):
|
||||
"""Initialize the binary_sensor."""
|
||||
self._zone_number = zone_number
|
||||
self._zone_type = zone_type
|
||||
@@ -62,16 +80,20 @@ class AlarmDecoderBinarySensor(BinarySensorDevice):
|
||||
async def async_added_to_hass(self):
|
||||
"""Register callbacks."""
|
||||
self.hass.helpers.dispatcher.async_dispatcher_connect(
|
||||
SIGNAL_ZONE_FAULT, self._fault_callback)
|
||||
SIGNAL_ZONE_FAULT, self._fault_callback
|
||||
)
|
||||
|
||||
self.hass.helpers.dispatcher.async_dispatcher_connect(
|
||||
SIGNAL_ZONE_RESTORE, self._restore_callback)
|
||||
SIGNAL_ZONE_RESTORE, self._restore_callback
|
||||
)
|
||||
|
||||
self.hass.helpers.dispatcher.async_dispatcher_connect(
|
||||
SIGNAL_RFX_MESSAGE, self._rfx_message_callback)
|
||||
SIGNAL_RFX_MESSAGE, self._rfx_message_callback
|
||||
)
|
||||
|
||||
self.hass.helpers.dispatcher.async_dispatcher_connect(
|
||||
SIGNAL_REL_MESSAGE, self._rel_message_callback)
|
||||
SIGNAL_REL_MESSAGE, self._rel_message_callback
|
||||
)
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
@@ -130,9 +152,9 @@ class AlarmDecoderBinarySensor(BinarySensorDevice):
|
||||
|
||||
def _rel_message_callback(self, message):
|
||||
"""Update relay state."""
|
||||
if (self._relay_addr == message.address and
|
||||
self._relay_chan == message.channel):
|
||||
_LOGGER.debug("Relay %d:%d value:%d", message.address,
|
||||
message.channel, message.value)
|
||||
if self._relay_addr == message.address and self._relay_chan == message.channel:
|
||||
_LOGGER.debug(
|
||||
"Relay %d:%d value:%d", message.address, message.channel, message.value
|
||||
)
|
||||
self._state = message.value
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
@@ -24,13 +24,14 @@ class AlarmDecoderSensor(Entity):
|
||||
"""Initialize the alarm panel."""
|
||||
self._display = ""
|
||||
self._state = None
|
||||
self._icon = 'mdi:alarm-check'
|
||||
self._name = 'Alarm Panel Display'
|
||||
self._icon = "mdi:alarm-check"
|
||||
self._name = "Alarm Panel Display"
|
||||
|
||||
async def async_added_to_hass(self):
|
||||
"""Register callbacks."""
|
||||
self.hass.helpers.dispatcher.async_dispatcher_connect(
|
||||
SIGNAL_PANEL_MESSAGE, self._message_callback)
|
||||
SIGNAL_PANEL_MESSAGE, self._message_callback
|
||||
)
|
||||
|
||||
def _message_callback(self, message):
|
||||
if self._display != message.text:
|
||||
|
||||
@@ -7,25 +7,32 @@ import voluptuous as vol
|
||||
import homeassistant.components.alarm_control_panel as alarm
|
||||
from homeassistant.components.alarm_control_panel import PLATFORM_SCHEMA
|
||||
from homeassistant.const import (
|
||||
CONF_CODE, CONF_NAME, CONF_PASSWORD, CONF_USERNAME, STATE_ALARM_ARMED_AWAY,
|
||||
STATE_ALARM_ARMED_HOME, STATE_ALARM_DISARMED)
|
||||
CONF_CODE,
|
||||
CONF_NAME,
|
||||
CONF_PASSWORD,
|
||||
CONF_USERNAME,
|
||||
STATE_ALARM_ARMED_AWAY,
|
||||
STATE_ALARM_ARMED_HOME,
|
||||
STATE_ALARM_DISARMED,
|
||||
)
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
DEFAULT_NAME = 'Alarm.com'
|
||||
DEFAULT_NAME = "Alarm.com"
|
||||
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Optional(CONF_CODE): cv.positive_int,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
})
|
||||
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_PASSWORD): cv.string,
|
||||
vol.Required(CONF_USERNAME): cv.string,
|
||||
vol.Optional(CONF_CODE): cv.positive_int,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_platform(hass, config, async_add_entities,
|
||||
discovery_info=None):
|
||||
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
|
||||
"""Set up a Alarm.com control panel."""
|
||||
name = config.get(CONF_NAME)
|
||||
code = config.get(CONF_CODE)
|
||||
@@ -43,7 +50,8 @@ class AlarmDotCom(alarm.AlarmControlPanel):
|
||||
def __init__(self, hass, name, code, username, password):
|
||||
"""Initialize the Alarm.com status."""
|
||||
from pyalarmdotcom import Alarmdotcom
|
||||
_LOGGER.debug('Setting up Alarm.com...')
|
||||
|
||||
_LOGGER.debug("Setting up Alarm.com...")
|
||||
self._hass = hass
|
||||
self._name = name
|
||||
self._code = str(code) if code else None
|
||||
@@ -51,8 +59,7 @@ class AlarmDotCom(alarm.AlarmControlPanel):
|
||||
self._password = password
|
||||
self._websession = async_get_clientsession(self._hass)
|
||||
self._state = None
|
||||
self._alarm = Alarmdotcom(
|
||||
username, password, self._websession, hass.loop)
|
||||
self._alarm = Alarmdotcom(username, password, self._websession, hass.loop)
|
||||
|
||||
async def async_login(self):
|
||||
"""Login to Alarm.com."""
|
||||
@@ -73,27 +80,25 @@ class AlarmDotCom(alarm.AlarmControlPanel):
|
||||
"""Return one or more digits/characters."""
|
||||
if self._code is None:
|
||||
return None
|
||||
if isinstance(self._code, str) and re.search('^\\d+$', self._code):
|
||||
if isinstance(self._code, str) and re.search("^\\d+$", self._code):
|
||||
return alarm.FORMAT_NUMBER
|
||||
return alarm.FORMAT_TEXT
|
||||
|
||||
@property
|
||||
def state(self):
|
||||
"""Return the state of the device."""
|
||||
if self._alarm.state.lower() == 'disarmed':
|
||||
if self._alarm.state.lower() == "disarmed":
|
||||
return STATE_ALARM_DISARMED
|
||||
if self._alarm.state.lower() == 'armed stay':
|
||||
if self._alarm.state.lower() == "armed stay":
|
||||
return STATE_ALARM_ARMED_HOME
|
||||
if self._alarm.state.lower() == 'armed away':
|
||||
if self._alarm.state.lower() == "armed away":
|
||||
return STATE_ALARM_ARMED_AWAY
|
||||
return None
|
||||
|
||||
@property
|
||||
def device_state_attributes(self):
|
||||
"""Return the state attributes."""
|
||||
return {
|
||||
'sensor_status': self._alarm.sensor_status
|
||||
}
|
||||
return {"sensor_status": self._alarm.sensor_status}
|
||||
|
||||
async def async_alarm_disarm(self, code=None):
|
||||
"""Send disarm command."""
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user