Mirror of https://github.com/home-assistant/core.git (synced 2025-10-29 13:39:29 +00:00)

Compare commits: media-sour ... edenhaus-g (1151 Commits)
Commit list: SHAs b5e1869a90 through 32aacac550 (author, date, and message columns not populated in the mirrored view).
@@ -58,6 +58,7 @@ base_platforms: &base_platforms
 # Extra components that trigger the full suite
 components: &components
   - homeassistant/components/alexa/**
+  - homeassistant/components/analytics/**
   - homeassistant/components/application_credentials/**
   - homeassistant/components/assist_pipeline/**
   - homeassistant/components/auth/**
@@ -41,6 +41,7 @@
   "python.terminal.activateEnvInCurrentTerminal": true,
   "python.testing.pytestArgs": ["--no-cov"],
   "pylint.importStrategy": "fromEnvironment",
+  "python.analysis.typeCheckingMode": "basic",
   "editor.formatOnPaste": false,
   "editor.formatOnSave": true,
   "editor.formatOnType": true,
.github/copilot-instructions.md (vendored, 1 line changed)

@@ -74,6 +74,7 @@ rules:
 - **Formatting**: Ruff
 - **Linting**: PyLint and Ruff
 - **Type Checking**: MyPy
+- **Lint/Type/Format Fixes**: Always prefer addressing the underlying issue (e.g., import the typed source, update shared stubs, align with Ruff expectations, or correct formatting at the source) before disabling a rule, adding `# type: ignore`, or skipping a formatter. Treat suppressions and `noqa` comments as a last resort once no compliant fix exists
 - **Testing**: pytest with plain functions and fixtures
 - **Language**: American English for all code, comments, and documentation (use sentence case, including titles)
.github/workflows/builder.yml (vendored, 12 lines changed)

@@ -190,7 +190,7 @@ jobs:
           echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -257,7 +257,7 @@ jobs:
           fi

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -326,20 +326,20 @@ jobs:
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Install Cosign
-        uses: sigstore/cosign-installer@d7543c93d881b35a8faa02e8e3605f69b7a1ce62 # v3.10.0
+        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
         with:
           cosign-release: "v2.2.3"

       - name: Login to DockerHub
         if: matrix.registry == 'docker.io/homeassistant'
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           username: ${{ secrets.DOCKERHUB_USERNAME }}
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Login to GitHub Container Registry
         if: matrix.registry == 'ghcr.io/home-assistant'
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
@@ -504,7 +504,7 @@ jobs:
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Login to GitHub Container Registry
-        uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
+        uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
.github/workflows/ci.yaml (vendored, 738 lines changed)

File diff suppressed because it is too large.
.github/workflows/codeql.yml (vendored, 4 lines changed)

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
+        uses: github/codeql-action/init@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@192325c86100d080feab897ff886c34abd4c83a3 # v3.30.3
+        uses: github/codeql-action/analyze@16140ae1a102900babc80a33c44059580f687047 # v4.30.9
         with:
           category: "/language:python"
.github/workflows/stale.yml (vendored, 6 lines changed)

@@ -17,7 +17,7 @@ jobs:
       # - No PRs marked as no-stale
       # - No issues (-1)
       - name: 60 days stale PRs policy
-        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
+        uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
       # - No issues marked as no-stale or help-wanted
       # - No PRs (-1)
       - name: 90 days stale issues
-        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
+        uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
         with:
           repo-token: ${{ steps.token.outputs.token }}
           days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
       # - No Issues marked as no-stale or help-wanted
       # - No PRs (-1)
       - name: Needs more information stale issues policy
-        uses: actions/stale@3a9db7e6a41a89f618792c92c0e97cc736e1b13f # v10.0.0
+        uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
         with:
           repo-token: ${{ steps.token.outputs.token }}
           only-labels: "needs-more-information"
.github/workflows/wheels.yml (vendored, 78 lines changed)

@@ -31,7 +31,8 @@ jobs:
     outputs:
       architectures: ${{ steps.info.outputs.architectures }}
     steps:
-      - name: Checkout the repository
+      - &checkout
+        name: Checkout the repository
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
@@ -91,7 +92,7 @@ jobs:
           ) > build_constraints.txt

       - name: Upload env_file
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: &actions-upload-artifact actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
         with:
           name: env_file
           path: ./.env_file
@@ -99,14 +100,14 @@ jobs:
           overwrite: true

       - name: Upload build_constraints
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: *actions-upload-artifact
         with:
           name: build_constraints
           path: ./build_constraints.txt
           overwrite: true

       - name: Upload requirements_diff
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: *actions-upload-artifact
         with:
           name: requirements_diff
           path: ./requirements_diff.txt
@@ -118,7 +119,7 @@ jobs:
           python -m script.gen_requirements_all ci

       - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
+        uses: *actions-upload-artifact
         with:
           name: requirements_all_wheels
           path: ./requirements_all_wheels_*.txt
@@ -127,28 +128,41 @@ jobs:
     name: Build Core wheels ${{ matrix.abi }} for ${{ matrix.arch }} (musllinux_1_2)
     if: github.repository_owner == 'home-assistant'
     needs: init
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
-      matrix:
-        abi: ["cp313"]
+      matrix: &matrix-build
+        abi: ["cp313", "cp314"]
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
+        include:
+          - os: ubuntu-latest
+          - arch: aarch64
+            os: ubuntu-24.04-arm
+        exclude:
+          - abi: cp314
+            arch: armv7
+          - abi: cp314
+            arch: armhf
+          - abi: cp314
+            arch: i386
     steps:
-      - name: Checkout the repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - *checkout

-      - name: Download env_file
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+      - &download-env-file
+        name: Download env_file
+        uses: &actions-download-artifact actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
         with:
           name: env_file

-      - name: Download build_constraints
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+      - &download-build-constraints
+        name: Download build_constraints
+        uses: *actions-download-artifact
         with:
           name: build_constraints

-      - name: Download requirements_diff
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+      - &download-requirements-diff
+        name: Download requirements_diff
+        uses: *actions-download-artifact
         with:
           name: requirements_diff

@@ -160,7 +174,7 @@ jobs:

       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
-        uses: home-assistant/wheels@2025.07.0
+        uses: &home-assistant-wheels home-assistant/wheels@2025.10.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -177,33 +191,19 @@ jobs:
     name: Build wheels ${{ matrix.abi }} for ${{ matrix.arch }}
     if: github.repository_owner == 'home-assistant'
     needs: init
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.os }}
     strategy:
       fail-fast: false
-      matrix:
-        abi: ["cp313"]
-        arch: ${{ fromJson(needs.init.outputs.architectures) }}
+      matrix: *matrix-build
     steps:
-      - name: Checkout the repository
-        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
+      - *checkout

-      - name: Download env_file
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        with:
-          name: env_file
-
-      - name: Download build_constraints
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        with:
-          name: build_constraints
-
-      - name: Download requirements_diff
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
-        with:
-          name: requirements_diff
+      - *download-env-file
+      - *download-build-constraints
+      - *download-requirements-diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
+        uses: *actions-download-artifact
         with:
           name: requirements_all_wheels

@@ -221,7 +221,7 @@ jobs:

       # home-assistant/wheels doesn't support sha pinning
       - name: Build wheels
-        uses: home-assistant/wheels@2025.07.0
+        uses: *home-assistant-wheels
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
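The wheels.yml rewrite above leans on YAML anchors (`&checkout`, `&actions-upload-artifact`, `&matrix-build`) and aliases (`*checkout`, `*matrix-build`) so the repeated checkout, download, and upload steps and the build matrix are defined once and reused. A minimal sketch of the pattern, with illustrative job names and an unpinned action reference rather than the workflow's real SHA-pinned ones:

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix: &matrix-build          # anchor: define the matrix once
        abi: ["cp313", "cp314"]
    steps:
      - &checkout                    # anchor on the whole step mapping
        name: Checkout the repository
        uses: actions/checkout@v5    # illustrative; the workflow pins a commit SHA
  build-legacy:
    runs-on: ubuntu-latest
    strategy:
      matrix: *matrix-build          # alias: reuse the same matrix verbatim
    steps:
      - *checkout                    # alias: reuse the same checkout step verbatim

An alias can only refer back to an anchor that appears earlier in the same file, so the first job keeps the full definitions and later jobs simply reference them.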
.gitignore (vendored, 1 line changed)

@@ -79,7 +79,6 @@ junit.xml
 .project
 .pydevproject

-.python-version
 .tool-versions

 # emacs auto backups
.python-version (new file, 1 line)

@@ -0,0 +1 @@
+3.13
@@ -182,7 +182,6 @@ homeassistant.components.efergy.*
 homeassistant.components.eheimdigital.*
 homeassistant.components.electrasmart.*
 homeassistant.components.electric_kiwi.*
-homeassistant.components.elevenlabs.*
 homeassistant.components.elgato.*
 homeassistant.components.elkm1.*
 homeassistant.components.emulated_hue.*
@@ -203,6 +202,7 @@ homeassistant.components.feedreader.*
 homeassistant.components.file_upload.*
 homeassistant.components.filesize.*
 homeassistant.components.filter.*
+homeassistant.components.firefly_iii.*
 homeassistant.components.fitbit.*
 homeassistant.components.flexit_bacnet.*
 homeassistant.components.flux_led.*
@@ -220,6 +220,7 @@ homeassistant.components.generic_thermostat.*
 homeassistant.components.geo_location.*
 homeassistant.components.geocaching.*
 homeassistant.components.gios.*
+homeassistant.components.github.*
 homeassistant.components.glances.*
 homeassistant.components.go2rtc.*
 homeassistant.components.goalzero.*
@@ -325,6 +326,7 @@ homeassistant.components.london_underground.*
 homeassistant.components.lookin.*
 homeassistant.components.lovelace.*
 homeassistant.components.luftdaten.*
+homeassistant.components.lunatone.*
 homeassistant.components.madvr.*
 homeassistant.components.manual.*
 homeassistant.components.mastodon.*
@@ -443,6 +445,7 @@ homeassistant.components.rituals_perfume_genie.*
 homeassistant.components.roborock.*
 homeassistant.components.roku.*
 homeassistant.components.romy.*
+homeassistant.components.route_b_smart_meter.*
 homeassistant.components.rpi_power.*
 homeassistant.components.rss_feed_template.*
 homeassistant.components.russound_rio.*
@@ -474,6 +477,7 @@ homeassistant.components.skybell.*
 homeassistant.components.slack.*
 homeassistant.components.sleep_as_android.*
 homeassistant.components.sleepiq.*
+homeassistant.components.sma.*
 homeassistant.components.smhi.*
 homeassistant.components.smlight.*
 homeassistant.components.smtp.*
@@ -552,6 +556,7 @@ homeassistant.components.vacuum.*
 homeassistant.components.vallox.*
 homeassistant.components.valve.*
 homeassistant.components.velbus.*
+homeassistant.components.vivotek.*
 homeassistant.components.vlc_telnet.*
 homeassistant.components.vodafone_station.*
 homeassistant.components.volvo.*
.vscode/settings.default.json (vendored, 2 lines changed)

@@ -7,6 +7,8 @@
   "python.testing.pytestEnabled": false,
   // https://code.visualstudio.com/docs/python/linting#_general-settings
   "pylint.importStrategy": "fromEnvironment",
+  // Pyright is too pedantic for Home Assistant
+  "python.analysis.typeCheckingMode": "basic",
   "json.schemas": [
     {
       "fileMatch": [
CODEOWNERS (generated, 36 lines changed)

@@ -46,6 +46,8 @@ build.json @home-assistant/supervisor
 /tests/components/accuweather/ @bieniu
 /homeassistant/components/acmeda/ @atmurray
 /tests/components/acmeda/ @atmurray
+/homeassistant/components/actron_air/ @kclif9 @JagadishDhanamjayam
+/tests/components/actron_air/ @kclif9 @JagadishDhanamjayam
 /homeassistant/components/adax/ @danielhiversen @lazytarget
 /tests/components/adax/ @danielhiversen @lazytarget
 /homeassistant/components/adguard/ @frenck
@@ -316,6 +318,8 @@ build.json @home-assistant/supervisor
 /tests/components/crownstone/ @Crownstone @RicArch97
 /homeassistant/components/cups/ @fabaff
 /tests/components/cups/ @fabaff
+/homeassistant/components/cync/ @Kinachi249
+/tests/components/cync/ @Kinachi249
 /homeassistant/components/daikin/ @fredrike
 /tests/components/daikin/ @fredrike
 /homeassistant/components/date/ @home-assistant/core
@@ -410,6 +414,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/egardia/ @jeroenterheerdt
 /homeassistant/components/eheimdigital/ @autinerd
 /tests/components/eheimdigital/ @autinerd
+/homeassistant/components/ekeybionyx/ @richardpolzer
+/tests/components/ekeybionyx/ @richardpolzer
 /homeassistant/components/electrasmart/ @jafar-atili
 /tests/components/electrasmart/ @jafar-atili
 /homeassistant/components/electric_kiwi/ @mikey0000
@@ -488,6 +494,8 @@ build.json @home-assistant/supervisor
 /tests/components/filesize/ @gjohansson-ST
 /homeassistant/components/filter/ @dgomes
 /tests/components/filter/ @dgomes
+/homeassistant/components/firefly_iii/ @erwindouna
+/tests/components/firefly_iii/ @erwindouna
 /homeassistant/components/fireservicerota/ @cyberjunky
 /tests/components/fireservicerota/ @cyberjunky
 /homeassistant/components/firmata/ @DaAwesomeP
@@ -611,6 +619,8 @@ build.json @home-assistant/supervisor
 /tests/components/greeneye_monitor/ @jkeljo
 /homeassistant/components/group/ @home-assistant/core
 /tests/components/group/ @home-assistant/core
+/homeassistant/components/growatt_server/ @johanzander
+/tests/components/growatt_server/ @johanzander
 /homeassistant/components/guardian/ @bachya
 /tests/components/guardian/ @bachya
 /homeassistant/components/habitica/ @tr4nt0r
@@ -756,8 +766,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
 /tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
 /homeassistant/components/intesishome/ @jnimmo
-/homeassistant/components/iometer/ @MaestroOnICe
-/tests/components/iometer/ @MaestroOnICe
+/homeassistant/components/iometer/ @jukrebs
+/tests/components/iometer/ @jukrebs
 /homeassistant/components/ios/ @robbiet480
 /tests/components/ios/ @robbiet480
 /homeassistant/components/iotawatt/ @gtdiehl @jyavenard
@@ -904,6 +914,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/luci/ @mzdrale
 /homeassistant/components/luftdaten/ @fabaff @frenck
 /tests/components/luftdaten/ @fabaff @frenck
+/homeassistant/components/lunatone/ @MoonDevLT
+/tests/components/lunatone/ @MoonDevLT
 /homeassistant/components/lupusec/ @majuss @suaveolent
 /tests/components/lupusec/ @majuss @suaveolent
 /homeassistant/components/lutron/ @cdheiser @wilburCForce
@@ -949,6 +961,8 @@ build.json @home-assistant/supervisor
 /tests/components/met_eireann/ @DylanGore
 /homeassistant/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
 /tests/components/meteo_france/ @hacf-fr @oncleben31 @Quentame
+/homeassistant/components/meteo_lt/ @xE1H
+/tests/components/meteo_lt/ @xE1H
 /homeassistant/components/meteoalarm/ @rolfberkenbosch
 /homeassistant/components/meteoclimatic/ @adrianmo
 /tests/components/meteoclimatic/ @adrianmo
@@ -972,8 +986,6 @@ build.json @home-assistant/supervisor
 /tests/components/moat/ @bdraco
 /homeassistant/components/mobile_app/ @home-assistant/core
 /tests/components/mobile_app/ @home-assistant/core
-/homeassistant/components/modbus/ @janiversen
-/tests/components/modbus/ @janiversen
 /homeassistant/components/modem_callerid/ @tkdrob
 /tests/components/modem_callerid/ @tkdrob
 /homeassistant/components/modern_forms/ @wonderslug
@@ -1057,6 +1069,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/nilu/ @hfurubotten
 /homeassistant/components/nina/ @DeerMaximum
 /tests/components/nina/ @DeerMaximum
+/homeassistant/components/nintendo_parental_controls/ @pantherale0
+/tests/components/nintendo_parental_controls/ @pantherale0
 /homeassistant/components/nissan_leaf/ @filcole
 /homeassistant/components/noaa_tides/ @jdelaney72
 /homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
@@ -1125,6 +1139,8 @@ build.json @home-assistant/supervisor
 /tests/components/opengarage/ @danielhiversen
 /homeassistant/components/openhome/ @bazwilliams
 /tests/components/openhome/ @bazwilliams
+/homeassistant/components/openrgb/ @felipecrs
+/tests/components/openrgb/ @felipecrs
 /homeassistant/components/opensky/ @joostlek
 /tests/components/opensky/ @joostlek
 /homeassistant/components/opentherm_gw/ @mvn23
@@ -1188,8 +1204,6 @@ build.json @home-assistant/supervisor
 /tests/components/plex/ @jjlawren
 /homeassistant/components/plugwise/ @CoMPaTech @bouwew
 /tests/components/plugwise/ @CoMPaTech @bouwew
-/homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
-/tests/components/plum_lightpad/ @ColinHarrington @prystupa
 /homeassistant/components/point/ @fredrike
 /tests/components/point/ @fredrike
 /homeassistant/components/pooldose/ @lmaertin
@@ -1332,6 +1346,8 @@ build.json @home-assistant/supervisor
 /tests/components/roomba/ @pschmitt @cyr-ius @shenxn @Orhideous
 /homeassistant/components/roon/ @pavoni
 /tests/components/roon/ @pavoni
+/homeassistant/components/route_b_smart_meter/ @SeraphicRav
+/tests/components/route_b_smart_meter/ @SeraphicRav
 /homeassistant/components/rpi_power/ @shenxn @swetoast
 /tests/components/rpi_power/ @shenxn @swetoast
 /homeassistant/components/rss_feed_template/ @home-assistant/core
@@ -1403,8 +1419,8 @@ build.json @home-assistant/supervisor
 /tests/components/sfr_box/ @epenet
 /homeassistant/components/sftp_storage/ @maretodoric
 /tests/components/sftp_storage/ @maretodoric
-/homeassistant/components/sharkiq/ @JeffResc @funkybunch
-/tests/components/sharkiq/ @JeffResc @funkybunch
+/homeassistant/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
+/tests/components/sharkiq/ @JeffResc @funkybunch @TheOneOgre
 /homeassistant/components/shell_command/ @home-assistant/core
 /tests/components/shell_command/ @home-assistant/core
 /homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
@@ -1469,8 +1485,8 @@ build.json @home-assistant/supervisor
 /tests/components/snoo/ @Lash-L
 /homeassistant/components/snooz/ @AustinBrunkhorst
 /tests/components/snooz/ @AustinBrunkhorst
-/homeassistant/components/solaredge/ @frenck @bdraco
-/tests/components/solaredge/ @frenck @bdraco
+/homeassistant/components/solaredge/ @frenck @bdraco @tronikos
+/tests/components/solaredge/ @frenck @bdraco @tronikos
 /homeassistant/components/solaredge_local/ @drobtravels @scheric
 /homeassistant/components/solarlog/ @Ernst79 @dontinelli
 /tests/components/solarlog/ @Ernst79 @dontinelli
Dockerfile  (generated, 17 changes)
@@ -15,23 +15,10 @@ ARG QEMU_CPU
 # Home Assistant S6-Overlay
 COPY rootfs /
 
-# Needs to be redefined inside the FROM statement to be set for RUN commands
-ARG BUILD_ARCH
-# Get go2rtc binary
-RUN \
-    case "${BUILD_ARCH}" in \
-        "aarch64") go2rtc_suffix='arm64' ;; \
-        "armhf") go2rtc_suffix='armv6' ;; \
-        "armv7") go2rtc_suffix='arm' ;; \
-        *) go2rtc_suffix=${BUILD_ARCH} ;; \
-    esac \
-    && curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.9/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
-    && chmod +x /bin/go2rtc \
-    # Verify go2rtc can be executed
-    && go2rtc --version
+COPY --from=ghcr.io/alexxit/go2rtc:1.9.11 /usr/local/bin/go2rtc /bin/go2rtc
 
 # Install uv
-RUN pip3 install uv==0.8.9
+RUN pip3 install uv==0.9.5
 
 WORKDIR /usr/src
 
@@ -34,9 +34,11 @@ WORKDIR /usr/src
 
 COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
 
-RUN uv python install 3.13.2
-
 USER vscode
 
+COPY .python-version ./
+RUN uv python install
+
 ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
 RUN uv venv $VIRTUAL_ENV
 ENV PATH="$VIRTUAL_ENV/bin:$PATH"
build.yaml  (10 changes)
@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.10.1
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.10.1
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.10.1
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.10.1
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.10.1
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
@@ -34,6 +34,9 @@ INPUT_FIELD_CODE = "code"
 
 DUMMY_SECRET = "FPPTH34D4E3MI2HG"
 
+GOOGLE_AUTHENTICATOR_URL = "https://support.google.com/accounts/answer/1066447"
+AUTHY_URL = "https://authy.com/"
+
 
 def _generate_qr_code(data: str) -> str:
     """Generate a base64 PNG string represent QR Code image of data."""
@@ -229,6 +232,8 @@ class TotpSetupFlow(SetupFlow[TotpAuthModule]):
                 "code": self._ota_secret,
                 "url": self._url,
                 "qr_code": self._image,
+                "google_authenticator_url": GOOGLE_AUTHENTICATOR_URL,
+                "authy_url": AUTHY_URL,
             },
             errors=errors,
         )
@@ -616,34 +616,34 @@ async def async_enable_logging(
         ),
     )
 
-    # Log errors to a file if we have write access to file or config dir
+    logger = logging.getLogger()
+    logger.setLevel(logging.INFO if verbose else logging.WARNING)
 
     if log_file is None:
-        err_log_path = hass.config.path(ERROR_LOG_FILENAME)
+        default_log_path = hass.config.path(ERROR_LOG_FILENAME)
+        if "SUPERVISOR" in os.environ:
+            _LOGGER.info("Running in Supervisor, not logging to file")
+            # Rename the default log file if it exists, since previous versions created
+            # it even on Supervisor
+            if os.path.isfile(default_log_path):
+                with contextlib.suppress(OSError):
+                    os.rename(default_log_path, f"{default_log_path}.old")
+            err_log_path = None
+        else:
+            err_log_path = default_log_path
     else:
         err_log_path = os.path.abspath(log_file)
 
-    err_path_exists = os.path.isfile(err_log_path)
-    err_dir = os.path.dirname(err_log_path)
-
-    # Check if we can write to the error log if it exists or that
-    # we can create files in the containing directory if not.
-    if (err_path_exists and os.access(err_log_path, os.W_OK)) or (
-        not err_path_exists and os.access(err_dir, os.W_OK)
-    ):
+    if err_log_path:
         err_handler = await hass.async_add_executor_job(
             _create_log_file, err_log_path, log_rotate_days
         )
 
         err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME))
 
-        logger = logging.getLogger()
         logger.addHandler(err_handler)
-        logger.setLevel(logging.INFO if verbose else logging.WARNING)
 
         # Save the log file location for access by other components.
         hass.data[DATA_LOGGING] = err_log_path
-    else:
-        _LOGGER.error("Unable to set up error log %s (access denied)", err_log_path)
 
     async_activate_log_queue_handler(hass)
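The Supervisor branch above only moves a stale log file aside and then skips file logging. A minimal standalone sketch of that rename step, using only the standard library (the function name and sample path are illustrative, not part of Home Assistant):

import contextlib
import os


def rotate_stale_log(path: str) -> None:
    """Move an existing log file aside to <path>.old, ignoring rename errors."""
    if os.path.isfile(path):
        with contextlib.suppress(OSError):
            os.rename(path, f"{path}.old")


rotate_stale_log("/config/home-assistant.log")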
homeassistant/brands/eltako.json  (new file, 5 lines)
{
  "domain": "eltako",
  "name": "Eltako",
  "iot_standards": ["matter"]
}
@@ -1,5 +0,0 @@
-{
-  "domain": "ibm",
-  "name": "IBM",
-  "integrations": ["watson_iot", "watson_tts"]
-}
homeassistant/brands/konnected.json  (new file, 5 lines)
{
  "domain": "konnected",
  "name": "Konnected",
  "integrations": ["konnected", "konnected_esphome"]
}
homeassistant/brands/level.json  (new file, 5 lines)
{
  "domain": "level",
  "name": "Level",
  "iot_standards": ["matter"]
}
@@ -4,21 +4,21 @@ from __future__ import annotations
 
 from datetime import timedelta
 import logging
-from typing import cast
 
 from aioacaia.acaiascale import AcaiaScale
 from aioacaia.exceptions import AcaiaDeviceNotFound, AcaiaError
-from bleak import BleakScanner
 
 from homeassistant.components.bluetooth import async_get_scanner
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_ADDRESS
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers.debounce import Debouncer
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
 
 from .const import CONF_IS_NEW_STYLE_SCALE
 
 SCAN_INTERVAL = timedelta(seconds=15)
+UPDATE_DEBOUNCE_TIME = 0.2
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -40,12 +40,20 @@ class AcaiaCoordinator(DataUpdateCoordinator[None]):
             config_entry=entry,
         )
 
+        debouncer = Debouncer(
+            hass=hass,
+            logger=_LOGGER,
+            cooldown=UPDATE_DEBOUNCE_TIME,
+            immediate=True,
+            function=self.async_update_listeners,
+        )
+
         self._scale = AcaiaScale(
             address_or_ble_device=entry.data[CONF_ADDRESS],
             name=entry.title,
            is_new_style_scale=entry.data[CONF_IS_NEW_STYLE_SCALE],
-            notify_callback=self.async_update_listeners,
-            scanner=cast(BleakScanner, async_get_scanner(hass)),
+            notify_callback=debouncer.async_schedule_call,
+            scanner=async_get_scanner(hass),
         )
 
     @property
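The coordinator change above routes every BLE notify callback through a Debouncer with a 0.2 s cooldown and immediate=True, so the first notification updates listeners right away and a burst within the cooldown collapses into a single trailing refresh. A rough standalone approximation of that behaviour with plain asyncio (this is an illustrative stand-in, not Home Assistant's Debouncer class):

import asyncio


class SimpleDebouncer:
    """Call `function` at most once per `cooldown` seconds (immediate mode)."""

    def __init__(self, cooldown: float, function) -> None:
        self._cooldown = cooldown
        self._function = function
        self._last_call = 0.0
        self._pending: asyncio.TimerHandle | None = None

    def async_schedule_call(self) -> None:
        loop = asyncio.get_running_loop()
        now = loop.time()
        if now - self._last_call >= self._cooldown:
            # Outside the cooldown window: fire immediately.
            self._last_call = now
            self._function()
        elif self._pending is None:
            # Inside the window: coalesce the burst into one trailing call.
            def _fire() -> None:
                self._pending = None
                self._last_call = loop.time()
                self._function()

            self._pending = loop.call_at(self._last_call + self._cooldown, _fire)


async def main() -> None:
    debouncer = SimpleDebouncer(0.2, lambda: print("update listeners"))
    for _ in range(10):  # simulate a burst of BLE notifications
        debouncer.async_schedule_call()
        await asyncio.sleep(0.01)
    await asyncio.sleep(0.3)  # let the trailing call fire


asyncio.run(main())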
@@ -71,4 +71,4 @@ POLLEN_CATEGORY_MAP = {
 }
 UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
-UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(hours=30)
+UPDATE_INTERVAL_HOURLY_FORECAST = timedelta(minutes=30)
@@ -1,6 +1,9 @@
 {
   "entity": {
     "sensor": {
+      "air_quality": {
+        "default": "mdi:air-filter"
+      },
       "cloud_ceiling": {
         "default": "mdi:weather-fog"
       },
@@ -34,9 +37,6 @@
       "thunderstorm_probability_night": {
         "default": "mdi:weather-lightning"
       },
-      "translation_key": {
-        "default": "mdi:air-filter"
-      },
       "tree_pollen": {
         "default": "mdi:tree-outline"
       },
@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.2.1"]
+  "requirements": ["accuweather==4.2.2"]
 }
homeassistant/components/actron_air/__init__.py  (new file, 57 lines)
"""The Actron Air integration."""

from actron_neo_api import (
    ActronAirNeoACSystem,
    ActronNeoAPI,
    ActronNeoAPIError,
    ActronNeoAuthError,
)

from homeassistant.const import CONF_API_TOKEN, Platform
from homeassistant.core import HomeAssistant

from .const import _LOGGER
from .coordinator import (
    ActronAirConfigEntry,
    ActronAirRuntimeData,
    ActronAirSystemCoordinator,
)

PLATFORM = [Platform.CLIMATE]


async def async_setup_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
    """Set up Actron Air integration from a config entry."""

    api = ActronNeoAPI(refresh_token=entry.data[CONF_API_TOKEN])
    systems: list[ActronAirNeoACSystem] = []

    try:
        systems = await api.get_ac_systems()
        await api.update_status()
    except ActronNeoAuthError:
        _LOGGER.error("Authentication error while setting up Actron Air integration")
        raise
    except ActronNeoAPIError as err:
        _LOGGER.error("API error while setting up Actron Air integration: %s", err)
        raise

    system_coordinators: dict[str, ActronAirSystemCoordinator] = {}
    for system in systems:
        coordinator = ActronAirSystemCoordinator(hass, entry, api, system)
        _LOGGER.debug("Setting up coordinator for system: %s", system["serial"])
        await coordinator.async_config_entry_first_refresh()
        system_coordinators[system["serial"]] = coordinator

    entry.runtime_data = ActronAirRuntimeData(
        api=api,
        system_coordinators=system_coordinators,
    )

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORM)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: ActronAirConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORM)
homeassistant/components/actron_air/climate.py  (new file, 259 lines)
"""Climate platform for Actron Air integration."""

from typing import Any

from actron_neo_api import ActronAirNeoStatus, ActronAirNeoZone

from homeassistant.components.climate import (
    FAN_AUTO,
    FAN_HIGH,
    FAN_LOW,
    FAN_MEDIUM,
    ClimateEntity,
    ClimateEntityFeature,
    HVACMode,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import ActronAirConfigEntry, ActronAirSystemCoordinator

PARALLEL_UPDATES = 0

FAN_MODE_MAPPING_ACTRONAIR_TO_HA = {
    "AUTO": FAN_AUTO,
    "LOW": FAN_LOW,
    "MED": FAN_MEDIUM,
    "HIGH": FAN_HIGH,
}
FAN_MODE_MAPPING_HA_TO_ACTRONAIR = {
    v: k for k, v in FAN_MODE_MAPPING_ACTRONAIR_TO_HA.items()
}
HVAC_MODE_MAPPING_ACTRONAIR_TO_HA = {
    "COOL": HVACMode.COOL,
    "HEAT": HVACMode.HEAT,
    "FAN": HVACMode.FAN_ONLY,
    "AUTO": HVACMode.AUTO,
    "OFF": HVACMode.OFF,
}
HVAC_MODE_MAPPING_HA_TO_ACTRONAIR = {
    v: k for k, v in HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.items()
}


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ActronAirConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Actron Air climate entities."""
    system_coordinators = entry.runtime_data.system_coordinators
    entities: list[ClimateEntity] = []

    for coordinator in system_coordinators.values():
        status = coordinator.data
        name = status.ac_system.system_name
        entities.append(ActronSystemClimate(coordinator, name))

        entities.extend(
            ActronZoneClimate(coordinator, zone)
            for zone in status.remote_zone_info
            if zone.exists
        )

    async_add_entities(entities)


class BaseClimateEntity(CoordinatorEntity[ActronAirSystemCoordinator], ClimateEntity):
    """Base class for Actron Air climate entities."""

    _attr_has_entity_name = True
    _attr_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_supported_features = (
        ClimateEntityFeature.TARGET_TEMPERATURE
        | ClimateEntityFeature.FAN_MODE
        | ClimateEntityFeature.TURN_ON
        | ClimateEntityFeature.TURN_OFF
    )
    _attr_name = None
    _attr_fan_modes = list(FAN_MODE_MAPPING_ACTRONAIR_TO_HA.values())
    _attr_hvac_modes = list(HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.values())

    def __init__(
        self,
        coordinator: ActronAirSystemCoordinator,
        name: str,
    ) -> None:
        """Initialize an Actron Air unit."""
        super().__init__(coordinator)
        self._serial_number = coordinator.serial_number


class ActronSystemClimate(BaseClimateEntity):
    """Representation of the Actron Air system."""

    _attr_supported_features = (
        ClimateEntityFeature.TARGET_TEMPERATURE
        | ClimateEntityFeature.FAN_MODE
        | ClimateEntityFeature.TURN_ON
        | ClimateEntityFeature.TURN_OFF
    )

    def __init__(
        self,
        coordinator: ActronAirSystemCoordinator,
        name: str,
    ) -> None:
        """Initialize an Actron Air unit."""
        super().__init__(coordinator, name)
        serial_number = coordinator.serial_number
        self._attr_unique_id = serial_number
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, serial_number)},
            name=self._status.ac_system.system_name,
            manufacturer="Actron Air",
            model_id=self._status.ac_system.master_wc_model,
            sw_version=self._status.ac_system.master_wc_firmware_version,
            serial_number=serial_number,
        )

    @property
    def min_temp(self) -> float:
        """Return the minimum temperature that can be set."""
        return self._status.min_temp

    @property
    def max_temp(self) -> float:
        """Return the maximum temperature that can be set."""
        return self._status.max_temp

    @property
    def _status(self) -> ActronAirNeoStatus:
        """Get the current status from the coordinator."""
        return self.coordinator.data

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Return the current HVAC mode."""
        if not self._status.user_aircon_settings.is_on:
            return HVACMode.OFF

        mode = self._status.user_aircon_settings.mode
        return HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.get(mode)

    @property
    def fan_mode(self) -> str | None:
        """Return the current fan mode."""
        fan_mode = self._status.user_aircon_settings.fan_mode
        return FAN_MODE_MAPPING_ACTRONAIR_TO_HA.get(fan_mode)

    @property
    def current_humidity(self) -> float:
        """Return the current humidity."""
        return self._status.master_info.live_humidity_pc

    @property
    def current_temperature(self) -> float:
        """Return the current temperature."""
        return self._status.master_info.live_temp_c

    @property
    def target_temperature(self) -> float:
        """Return the target temperature."""
        return self._status.user_aircon_settings.temperature_setpoint_cool_c

    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set a new fan mode."""
        api_fan_mode = FAN_MODE_MAPPING_HA_TO_ACTRONAIR.get(fan_mode.lower())
        await self._status.user_aircon_settings.set_fan_mode(api_fan_mode)

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set the HVAC mode."""
        ac_mode = HVAC_MODE_MAPPING_HA_TO_ACTRONAIR.get(hvac_mode)
        await self._status.ac_system.set_system_mode(ac_mode)

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set the temperature."""
        temp = kwargs.get(ATTR_TEMPERATURE)
        await self._status.user_aircon_settings.set_temperature(temperature=temp)


class ActronZoneClimate(BaseClimateEntity):
    """Representation of a zone within the Actron Air system."""

    _attr_supported_features = (
        ClimateEntityFeature.TARGET_TEMPERATURE
        | ClimateEntityFeature.TURN_ON
        | ClimateEntityFeature.TURN_OFF
    )

    def __init__(
        self,
        coordinator: ActronAirSystemCoordinator,
        zone: ActronAirNeoZone,
    ) -> None:
        """Initialize an Actron Air unit."""
        super().__init__(coordinator, zone.title)
        serial_number = coordinator.serial_number
        self._zone_id: int = zone.zone_id
        self._attr_unique_id: str = f"{serial_number}_zone_{zone.zone_id}"
        self._attr_device_info: DeviceInfo = DeviceInfo(
            identifiers={(DOMAIN, self._attr_unique_id)},
            name=zone.title,
            manufacturer="Actron Air",
            model="Zone",
            suggested_area=zone.title,
            via_device=(DOMAIN, serial_number),
        )

    @property
    def min_temp(self) -> float:
        """Return the minimum temperature that can be set."""
        return self._zone.min_temp

    @property
    def max_temp(self) -> float:
        """Return the maximum temperature that can be set."""
        return self._zone.max_temp

    @property
    def _zone(self) -> ActronAirNeoZone:
        """Get the current zone data from the coordinator."""
        status = self.coordinator.data
        return status.zones[self._zone_id]

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Return the current HVAC mode."""
        if self._zone.is_active:
            mode = self._zone.hvac_mode
            return HVAC_MODE_MAPPING_ACTRONAIR_TO_HA.get(mode)
        return HVACMode.OFF

    @property
    def current_humidity(self) -> float | None:
        """Return the current humidity."""
        return self._zone.humidity

    @property
    def current_temperature(self) -> float | None:
        """Return the current temperature."""
        return self._zone.live_temp_c

    @property
    def target_temperature(self) -> float | None:
        """Return the target temperature."""
        return self._zone.temperature_setpoint_cool_c

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set the HVAC mode."""
        is_enabled = hvac_mode != HVACMode.OFF
        await self._zone.enable(is_enabled)

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set the temperature."""
        await self._zone.set_temperature(temperature=kwargs["temperature"])
homeassistant/components/actron_air/config_flow.py  (new file, 132 lines)
"""Setup config flow for Actron Air integration."""

import asyncio
from typing import Any

from actron_neo_api import ActronNeoAPI, ActronNeoAuthError

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN
from homeassistant.exceptions import HomeAssistantError

from .const import _LOGGER, DOMAIN


class ActronAirConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Actron Air."""

    def __init__(self) -> None:
        """Initialize the config flow."""
        self._api: ActronNeoAPI | None = None
        self._device_code: str | None = None
        self._user_code: str = ""
        self._verification_uri: str = ""
        self._expires_minutes: str = "30"
        self.login_task: asyncio.Task | None = None

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        if self._api is None:
            _LOGGER.debug("Initiating device authorization")
            self._api = ActronNeoAPI()
            try:
                device_code_response = await self._api.request_device_code()
            except ActronNeoAuthError as err:
                _LOGGER.error("OAuth2 flow failed: %s", err)
                return self.async_abort(reason="oauth2_error")

            self._device_code = device_code_response["device_code"]
            self._user_code = device_code_response["user_code"]
            self._verification_uri = device_code_response["verification_uri_complete"]
            self._expires_minutes = str(device_code_response["expires_in"] // 60)

        async def _wait_for_authorization() -> None:
            """Wait for the user to authorize the device."""
            assert self._api is not None
            assert self._device_code is not None
            _LOGGER.debug("Waiting for device authorization")
            try:
                await self._api.poll_for_token(self._device_code)
                _LOGGER.debug("Authorization successful")
            except ActronNeoAuthError as ex:
                _LOGGER.exception("Error while waiting for device authorization")
                raise CannotConnect from ex

        _LOGGER.debug("Checking login task")
        if self.login_task is None:
            _LOGGER.debug("Creating task for device authorization")
            self.login_task = self.hass.async_create_task(_wait_for_authorization())

        if self.login_task.done():
            _LOGGER.debug("Login task is done, checking results")
            if exception := self.login_task.exception():
                if isinstance(exception, CannotConnect):
                    return self.async_show_progress_done(
                        next_step_id="connection_error"
                    )
                return self.async_show_progress_done(next_step_id="timeout")
            return self.async_show_progress_done(next_step_id="finish_login")

        return self.async_show_progress(
            step_id="user",
            progress_action="wait_for_authorization",
            description_placeholders={
                "user_code": self._user_code,
                "verification_uri": self._verification_uri,
                "expires_minutes": self._expires_minutes,
            },
            progress_task=self.login_task,
        )

    async def async_step_finish_login(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the finalization of login."""
        _LOGGER.debug("Finalizing authorization")
        assert self._api is not None

        try:
            user_data = await self._api.get_user_info()
        except ActronNeoAuthError as err:
            _LOGGER.error("Error getting user info: %s", err)
            return self.async_abort(reason="oauth2_error")

        unique_id = str(user_data["id"])
        await self.async_set_unique_id(unique_id)
        self._abort_if_unique_id_configured()

        return self.async_create_entry(
            title=user_data["email"],
            data={CONF_API_TOKEN: self._api.refresh_token_value},
        )

    async def async_step_timeout(
        self,
        user_input: dict[str, Any] | None = None,
    ) -> ConfigFlowResult:
        """Handle issues that need transition await from progress step."""
        if user_input is None:
            return self.async_show_form(
                step_id="timeout",
            )
        del self.login_task
        return await self.async_step_user()

    async def async_step_connection_error(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle connection error from progress step."""
        if user_input is None:
            return self.async_show_form(step_id="connection_error")

        # Reset state and try again
        self._api = None
        self._device_code = None
        self.login_task = None
        return await self.async_step_user()


class CannotConnect(HomeAssistantError):
    """Error to indicate we cannot connect."""
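The config flow above drives an OAuth2 device-authorization grant: request a device code, show the user a verification URL, and poll in a background task until the user approves or the code expires. A compact, generic sketch of that polling pattern (the callback and exception names below are hypothetical, not the actron_neo_api API):

import asyncio


class AuthorizationPending(Exception):
    """Raised while the user has not yet approved the device code."""


async def poll_for_token(check_once, interval: float = 5.0, timeout: float = 300.0):
    """Poll `check_once` until it returns a token or the timeout expires."""
    async with asyncio.timeout(timeout):
        while True:
            try:
                # `check_once` is an async callable hitting the token endpoint.
                return await check_once()
            except AuthorizationPending:
                await asyncio.sleep(interval)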
homeassistant/components/actron_air/const.py  (new file, 6 lines)
"""Constants used by Actron Air integration."""

import logging

_LOGGER = logging.getLogger(__package__)
DOMAIN = "actron_air"
homeassistant/components/actron_air/coordinator.py  (new file, 69 lines)
"""Coordinator for Actron Air integration."""

from __future__ import annotations

from dataclasses import dataclass
from datetime import timedelta

from actron_neo_api import ActronAirNeoACSystem, ActronAirNeoStatus, ActronNeoAPI

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.util import dt as dt_util

from .const import _LOGGER

STALE_DEVICE_TIMEOUT = timedelta(hours=24)
ERROR_NO_SYSTEMS_FOUND = "no_systems_found"
ERROR_UNKNOWN = "unknown_error"


@dataclass
class ActronAirRuntimeData:
    """Runtime data for the Actron Air integration."""

    api: ActronNeoAPI
    system_coordinators: dict[str, ActronAirSystemCoordinator]


type ActronAirConfigEntry = ConfigEntry[ActronAirRuntimeData]

AUTH_ERROR_THRESHOLD = 3
SCAN_INTERVAL = timedelta(seconds=30)


class ActronAirSystemCoordinator(DataUpdateCoordinator[ActronAirNeoACSystem]):
    """System coordinator for Actron Air integration."""

    def __init__(
        self,
        hass: HomeAssistant,
        entry: ActronAirConfigEntry,
        api: ActronNeoAPI,
        system: ActronAirNeoACSystem,
    ) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            name="Actron Air Status",
            update_interval=SCAN_INTERVAL,
            config_entry=entry,
        )
        self.system = system
        self.serial_number = system["serial"]
        self.api = api
        self.status = self.api.state_manager.get_status(self.serial_number)
        self.last_seen = dt_util.utcnow()

    async def _async_update_data(self) -> ActronAirNeoStatus:
        """Fetch updates and merge incremental changes into the full state."""
        await self.api.update_status()
        self.status = self.api.state_manager.get_status(self.serial_number)
        self.last_seen = dt_util.utcnow()
        return self.status

    def is_device_stale(self) -> bool:
        """Check if a device is stale (not seen for a while)."""
        return (dt_util.utcnow() - self.last_seen) > STALE_DEVICE_TIMEOUT
homeassistant/components/actron_air/manifest.json  (new file, 16 lines)
{
  "domain": "actron_air",
  "name": "Actron Air",
  "codeowners": ["@kclif9", "@JagadishDhanamjayam"],
  "config_flow": true,
  "dhcp": [
    {
      "hostname": "neo-*",
      "macaddress": "FC0FE7*"
    }
  ],
  "documentation": "https://www.home-assistant.io/integrations/actron_air",
  "iot_class": "cloud_polling",
  "quality_scale": "bronze",
  "requirements": ["actron-neo-api==0.1.84"]
}
homeassistant/components/actron_air/quality_scale.yaml  (new file, 78 lines)
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: This integration does not have custom service actions.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: This integration does not have custom service actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: This integration does not subscribe to external events.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions: todo
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: No options flow
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow: todo
  test-coverage: todo

  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info:
    status: exempt
    comment: This integration uses DHCP discovery, however is cloud polling. Therefore there is no information to update.
  discovery: done
  docs-data-update: done
  docs-examples: done
  docs-known-limitations: done
  docs-supported-devices: done
  docs-supported-functions: done
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices: todo
  entity-category:
    status: exempt
    comment: This integration does not use entity categories.
  entity-device-class:
    status: exempt
    comment: This integration does not use entity device classes.
  entity-disabled-by-default:
    status: exempt
    comment: Not required for this integration at this stage.
  entity-translations: todo
  exception-translations: todo
  icon-translations: todo
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: This integration does not have any known issues that require repair.
  stale-devices: todo

  # Platinum
  async-dependency: done
  inject-websession: todo
  strict-typing: todo
homeassistant/components/actron_air/strings.json  (new file, 29 lines)
{
  "config": {
    "step": {
      "user": {
        "title": "Actron Air OAuth2 Authorization"
      },
      "timeout": {
        "title": "Authorization timeout",
        "description": "The authorization process timed out. Please try again.",
        "data": {}
      },
      "connection_error": {
        "title": "Connection error",
        "description": "Failed to connect to Actron Air. Please check your internet connection and try again.",
        "data": {}
      }
    },
    "progress": {
      "wait_for_authorization": "To authenticate, open the following URL and login at Actron Air:\n{verification_uri}\nIf the code is not automatically copied, paste the following code to authorize the integration:\n\n```{user_code}```\n\n\nThe login attempt will time out after {expires_minutes} minutes."
    },
    "error": {
      "oauth2_error": "Failed to start OAuth2 flow. Please try again later."
    },
    "abort": {
      "oauth2_error": "Failed to start OAuth2 flow",
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
    }
  }
}
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/adax",
   "iot_class": "local_polling",
   "loggers": ["adax", "adax_local"],
-  "requirements": ["adax==0.4.0", "Adax-local==0.1.5"]
+  "requirements": ["adax==0.4.0", "Adax-local==0.2.0"]
 }
@@ -71,7 +71,14 @@ class AemetConfigFlow(ConfigFlow, domain=DOMAIN):
             }
         )
 
-        return self.async_show_form(step_id="user", data_schema=schema, errors=errors)
+        return self.async_show_form(
+            step_id="user",
+            data_schema=schema,
+            errors=errors,
+            description_placeholders={
+                "api_key_url": "https://opendata.aemet.es/centrodedescargas/altaUsuario"
+            },
+        )
 
     @staticmethod
     @callback
@@ -14,7 +14,7 @@
         "longitude": "[%key:common::config_flow::data::longitude%]",
         "name": "Name of the integration"
       },
-      "description": "To generate API key go to https://opendata.aemet.es/centrodedescargas/altaUsuario"
+      "description": "To generate API key go to {api_key_url}"
     }
   }
 },
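The AEMET, Airly, and AirNow changes in this set all follow the same pattern: the raw URL moves out of the translated description and into description_placeholders, while strings.json keeps only a {token}. A minimal sketch of the substitution the frontend performs, approximated here with str.format (the URL value is the one passed by the config flow above):

description = "To generate API key go to {api_key_url}"
placeholders = {
    "api_key_url": "https://opendata.aemet.es/centrodedescargas/altaUsuario"
}
print(description.format(**placeholders))
# To generate API key go to https://opendata.aemet.es/centrodedescargas/altaUsuario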
@@ -53,9 +53,6 @@ __all__ = [
     "GenImageTaskResult",
     "async_generate_data",
     "async_generate_image",
-    "async_setup",
-    "async_setup_entry",
-    "async_unload_entry",
 ]
 
 _LOGGER = logging.getLogger(__name__)
@@ -1,7 +1,9 @@
 """Airgradient Update platform."""
 
 from datetime import timedelta
+import logging
 
+from airgradient import AirGradientConnectionError
 from propcache.api import cached_property
 
 from homeassistant.components.update import UpdateDeviceClass, UpdateEntity
@@ -13,6 +15,7 @@ from .entity import AirGradientEntity
 
 PARALLEL_UPDATES = 1
 SCAN_INTERVAL = timedelta(hours=1)
+_LOGGER = logging.getLogger(__name__)
 
 
 async def async_setup_entry(
@@ -31,6 +34,7 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
     """Representation of Airgradient Update."""
 
     _attr_device_class = UpdateDeviceClass.FIRMWARE
+    _server_unreachable_logged = False
 
     def __init__(self, coordinator: AirGradientCoordinator) -> None:
         """Initialize the entity."""
@@ -47,10 +51,27 @@ class AirGradientUpdate(AirGradientEntity, UpdateEntity):
         """Return the installed version of the entity."""
         return self.coordinator.data.measures.firmware_version
 
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return super().available and self._attr_available
+
     async def async_update(self) -> None:
         """Update the entity."""
-        self._attr_latest_version = (
-            await self.coordinator.client.get_latest_firmware_version(
-                self.coordinator.serial_number
-            )
-        )
+        try:
+            self._attr_latest_version = (
+                await self.coordinator.client.get_latest_firmware_version(
+                    self.coordinator.serial_number
+                )
+            )
+        except AirGradientConnectionError:
+            self._attr_latest_version = None
+            self._attr_available = False
+            if not self._server_unreachable_logged:
+                _LOGGER.error(
+                    "Unable to connect to AirGradient server to check for updates"
+                )
+                self._server_unreachable_logged = True
+        else:
+            self._server_unreachable_logged = False
+            self._attr_available = True
|
|||||||
|
|
||||||
from .const import CONF_USE_NEAREST, DOMAIN, NO_AIRLY_SENSORS
|
from .const import CONF_USE_NEAREST, DOMAIN, NO_AIRLY_SENSORS
|
||||||
|
|
||||||
|
DESCRIPTION_PLACEHOLDERS = {
|
||||||
|
"developer_registration_url": "https://developer.airly.eu/register",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
|
class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||||
"""Config flow for Airly."""
|
"""Config flow for Airly."""
|
||||||
@@ -85,6 +89,7 @@ class AirlyFlowHandler(ConfigFlow, domain=DOMAIN):
|
|||||||
}
|
}
|
||||||
),
|
),
|
||||||
errors=errors,
|
errors=errors,
|
||||||
|
description_placeholders=DESCRIPTION_PLACEHOLDERS,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -2,7 +2,7 @@
   "config": {
     "step": {
       "user": {
-        "description": "To generate API key go to https://developer.airly.eu/register",
+        "description": "To generate API key go to {developer_registration_url}",
         "data": {
           "name": "[%key:common::config_flow::data::name%]",
           "api_key": "[%key:common::config_flow::data::api_key%]",
@@ -26,6 +26,10 @@ from .const import DOMAIN
 _LOGGER = logging.getLogger(__name__)
 
 
+# Documentation URL for API key generation
+_API_KEY_URL = "https://docs.airnowapi.org/account/request/"
+
+
 async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> bool:
     """Validate the user input allows us to connect.
 
@@ -114,6 +118,7 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN):
                 ),
             }
         ),
+        description_placeholders={"api_key_url": _API_KEY_URL},
         errors=errors,
     )
 
@@ -2,7 +2,7 @@
   "config": {
     "step": {
       "user": {
-        "description": "To generate API key go to https://docs.airnowapi.org/account/request/",
+        "description": "To generate API key go to {api_key_url}",
         "data": {
          "api_key": "[%key:common::config_flow::data::api_key%]",
          "latitude": "[%key:common::config_flow::data::latitude%]",
@@ -2,12 +2,23 @@
 
 from __future__ import annotations
 
+import logging
+
 from airos.airos8 import AirOS8
 
-from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
-from homeassistant.core import HomeAssistant
+from homeassistant.const import (
+    CONF_HOST,
+    CONF_PASSWORD,
+    CONF_SSL,
+    CONF_USERNAME,
+    CONF_VERIFY_SSL,
+    Platform,
+)
+from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers import device_registry as dr, entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 
+from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
 from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator
 
 _PLATFORMS: list[Platform] = [
@@ -15,19 +26,24 @@ _PLATFORMS: list[Platform] = [
     Platform.SENSOR,
 ]
 
+_LOGGER = logging.getLogger(__name__)
+
 
 async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
     """Set up Ubiquiti airOS from a config entry."""
 
     # By default airOS 8 comes with self-signed SSL certificates,
     # with no option in the web UI to change or upload a custom certificate.
-    session = async_get_clientsession(hass, verify_ssl=False)
+    session = async_get_clientsession(
+        hass, verify_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL]
+    )
 
     airos_device = AirOS8(
         host=entry.data[CONF_HOST],
         username=entry.data[CONF_USERNAME],
         password=entry.data[CONF_PASSWORD],
         session=session,
+        use_ssl=entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
     )
 
     coordinator = AirOSDataUpdateCoordinator(hass, entry, airos_device)
@@ -40,6 +56,77 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
     return True
 
 
+async def async_migrate_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
+    """Migrate old config entry."""
+
+    # This means the user has downgraded from a future version
+    if entry.version > 2:
+        return False
+
+    # 1.1 Migrate config_entry to add advanced ssl settings
+    if entry.version == 1 and entry.minor_version == 1:
+        new_minor_version = 2
+        new_data = {**entry.data}
+        advanced_data = {
+            CONF_SSL: DEFAULT_SSL,
+            CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
+        }
+        new_data[SECTION_ADVANCED_SETTINGS] = advanced_data
+
+        hass.config_entries.async_update_entry(
+            entry,
+            data=new_data,
+            minor_version=new_minor_version,
+        )
+
+    # 2.1 Migrate binary_sensor entity unique_id from device_id to mac_address
+    # Step 1 - migrate binary_sensor entity unique_id
+    # Step 2 - migrate device entity identifier
+    if entry.version == 1:
+        new_version = 2
+        new_minor_version = 1
+
+        mac_adress = dr.format_mac(entry.unique_id)
+
+        device_registry = dr.async_get(hass)
+        if device_entry := device_registry.async_get_device(
+            connections={(dr.CONNECTION_NETWORK_MAC, mac_adress)}
+        ):
+            old_device_id = next(
+                (
+                    device_id
+                    for domain, device_id in device_entry.identifiers
+                    if domain == DOMAIN
+                ),
+            )
+
+            @callback
+            def update_unique_id(
+                entity_entry: er.RegistryEntry,
+            ) -> dict[str, str] | None:
+                """Update unique id from device_id to mac address."""
+                if old_device_id and entity_entry.unique_id.startswith(old_device_id):
+                    suffix = entity_entry.unique_id.removeprefix(old_device_id)
+                    new_unique_id = f"{mac_adress}{suffix}"
+                    return {"new_unique_id": new_unique_id}
+                return None
+
+            await er.async_migrate_entries(hass, entry.entry_id, update_unique_id)
+
+            new_identifiers = device_entry.identifiers.copy()
+            new_identifiers.discard((DOMAIN, old_device_id))
+            new_identifiers.add((DOMAIN, mac_adress))
+            device_registry.async_update_device(
+                device_entry.id, new_identifiers=new_identifiers
+            )
+
+        hass.config_entries.async_update_entry(
+            entry, version=new_version, minor_version=new_minor_version
+        )
+
+    return True
+
+
 async def async_unload_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
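The entity-registry part of the migration above only swaps the old device-id prefix of each unique_id for the device MAC address. The core of that rewrite can be exercised on its own (function name and sample values below are illustrative):

def migrate_unique_id(unique_id: str, old_device_id: str, mac: str) -> str | None:
    """Return the rewritten unique_id, or None when no rewrite is needed."""
    if old_device_id and unique_id.startswith(old_device_id):
        return f"{mac}{unique_id.removeprefix(old_device_id)}"
    return None


print(migrate_unique_id("0418D6000000_wireless", "0418D6000000", "04:18:d6:00:00:00"))
# 04:18:d6:00:00:00_wireless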
@@ -98,7 +98,7 @@ class AirOSBinarySensor(AirOSEntity, BinarySensorEntity):
         super().__init__(coordinator)
 
         self.entity_description = description
-        self._attr_unique_id = f"{coordinator.data.host.device_id}_{description.key}"
+        self._attr_unique_id = f"{coordinator.data.derived.mac}_{description.key}"
 
     @property
     def is_on(self) -> bool:
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+from collections.abc import Mapping
 import logging
 from typing import Any
 
@@ -14,11 +15,28 @@ from airos.exceptions import (
 )
 import voluptuous as vol
 
-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
-from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
+from homeassistant.config_entries import (
+    SOURCE_REAUTH,
+    SOURCE_RECONFIGURE,
+    ConfigFlow,
+    ConfigFlowResult,
+)
+from homeassistant.const import (
+    CONF_HOST,
+    CONF_PASSWORD,
+    CONF_SSL,
+    CONF_USERNAME,
+    CONF_VERIFY_SSL,
+)
+from homeassistant.data_entry_flow import section
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.selector import (
+    TextSelector,
+    TextSelectorConfig,
+    TextSelectorType,
+)
 
-from .const import DOMAIN
+from .const import DEFAULT_SSL, DEFAULT_VERIFY_SSL, DOMAIN, SECTION_ADVANCED_SETTINGS
 from .coordinator import AirOS8
 
 _LOGGER = logging.getLogger(__name__)
@@ -28,6 +46,15 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
         vol.Required(CONF_HOST): str,
         vol.Required(CONF_USERNAME, default="ubnt"): str,
         vol.Required(CONF_PASSWORD): str,
+        vol.Required(SECTION_ADVANCED_SETTINGS): section(
+            vol.Schema(
+                {
+                    vol.Required(CONF_SSL, default=DEFAULT_SSL): bool,
+                    vol.Required(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): bool,
+                }
+            ),
+            {"collapsed": True},
+        ),
     }
 )
 
@@ -35,48 +62,161 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
 class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Ubiquiti airOS."""
 
-    VERSION = 1
+    VERSION = 2
+    MINOR_VERSION = 1
+
+    def __init__(self) -> None:
+        """Initialize the config flow."""
+        super().__init__()
+        self.airos_device: AirOS8
+        self.errors: dict[str, str] = {}
 
     async def async_step_user(
-        self,
-        user_input: dict[str, Any] | None = None,
+        self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
-        """Handle the initial step."""
-        errors: dict[str, str] = {}
+        """Handle the manual input of host and credentials."""
+        self.errors = {}
         if user_input is not None:
-            # By default airOS 8 comes with self-signed SSL certificates,
-            # with no option in the web UI to change or upload a custom certificate.
-            session = async_get_clientsession(self.hass, verify_ssl=False)
-
-            airos_device = AirOS8(
-                host=user_input[CONF_HOST],
-                username=user_input[CONF_USERNAME],
-                password=user_input[CONF_PASSWORD],
-                session=session,
-            )
-            try:
-                await airos_device.login()
-                airos_data = await airos_device.status()
-
-            except (
-                AirOSConnectionSetupError,
-                AirOSDeviceConnectionError,
-            ):
-                errors["base"] = "cannot_connect"
-            except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
-                errors["base"] = "invalid_auth"
-            except AirOSKeyDataMissingError:
-                errors["base"] = "key_data_missing"
-            except Exception:
-                _LOGGER.exception("Unexpected exception")
-                errors["base"] = "unknown"
-            else:
-                await self.async_set_unique_id(airos_data.derived.mac)
-                self._abort_if_unique_id_configured()
+            validated_info = await self._validate_and_get_device_info(user_input)
+            if validated_info:
                 return self.async_create_entry(
-                    title=airos_data.host.hostname, data=user_input
+                    title=validated_info["title"],
+                    data=validated_info["data"],
                 )
+        return self.async_show_form(
+            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=self.errors
+        )
+
+    async def _validate_and_get_device_info(
+        self, config_data: dict[str, Any]
+    ) -> dict[str, Any] | None:
+        """Validate user input with the device API."""
+        # By default airOS 8 comes with self-signed SSL certificates,
+        # with no option in the web UI to change or upload a custom certificate.
+        session = async_get_clientsession(
+            self.hass,
+            verify_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_VERIFY_SSL],
+        )
+
+        airos_device = AirOS8(
+            host=config_data[CONF_HOST],
+            username=config_data[CONF_USERNAME],
+            password=config_data[CONF_PASSWORD],
+            session=session,
+            use_ssl=config_data[SECTION_ADVANCED_SETTINGS][CONF_SSL],
+        )
+        try:
+            await airos_device.login()
+            airos_data = await airos_device.status()
+
+        except (
+            AirOSConnectionSetupError,
+            AirOSDeviceConnectionError,
+        ):
+            self.errors["base"] = "cannot_connect"
+        except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
+            self.errors["base"] = "invalid_auth"
+        except AirOSKeyDataMissingError:
+            self.errors["base"] = "key_data_missing"
+        except Exception:
+            _LOGGER.exception("Unexpected exception during credential validation")
+            self.errors["base"] = "unknown"
+        else:
+            await self.async_set_unique_id(airos_data.derived.mac)
+
+            if self.source in [SOURCE_REAUTH, SOURCE_RECONFIGURE]:
+                self._abort_if_unique_id_mismatch()
+            else:
+                self._abort_if_unique_id_configured()
+
+            return {"title": airos_data.host.hostname, "data": config_data}
+
+        return None
+
+    async def async_step_reauth(
+        self,
+        user_input: Mapping[str, Any],
+    ) -> ConfigFlowResult:
+        """Perform reauthentication upon an API authentication error."""
+        return await self.async_step_reauth_confirm(user_input)
+
+    async def async_step_reauth_confirm(
+        self,
+        user_input: Mapping[str, Any],
|
||||||
|
) -> ConfigFlowResult:
|
||||||
|
"""Perform reauthentication upon an API authentication error."""
|
||||||
|
self.errors = {}
|
||||||
|
|
||||||
|
if user_input:
|
||||||
|
validate_data = {**self._get_reauth_entry().data, **user_input}
|
||||||
|
if await self._validate_and_get_device_info(config_data=validate_data):
|
||||||
|
return self.async_update_reload_and_abort(
|
||||||
|
self._get_reauth_entry(),
|
||||||
|
data_updates=validate_data,
|
||||||
)
|
)
|
||||||
|
|
||||||
return self.async_show_form(
|
return self.async_show_form(
|
||||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
step_id="reauth_confirm",
|
||||||
|
data_schema=vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(CONF_PASSWORD): TextSelector(
|
||||||
|
TextSelectorConfig(
|
||||||
|
type=TextSelectorType.PASSWORD,
|
||||||
|
autocomplete="current-password",
|
||||||
|
)
|
||||||
|
),
|
||||||
|
}
|
||||||
|
),
|
||||||
|
errors=self.errors,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def async_step_reconfigure(
|
||||||
|
self,
|
||||||
|
user_input: Mapping[str, Any] | None = None,
|
||||||
|
) -> ConfigFlowResult:
|
||||||
|
"""Handle reconfiguration of airOS."""
|
||||||
|
self.errors = {}
|
||||||
|
entry = self._get_reconfigure_entry()
|
||||||
|
current_data = entry.data
|
||||||
|
|
||||||
|
if user_input is not None:
|
||||||
|
validate_data = {**current_data, **user_input}
|
||||||
|
if await self._validate_and_get_device_info(config_data=validate_data):
|
||||||
|
return self.async_update_reload_and_abort(
|
||||||
|
entry,
|
||||||
|
data_updates=validate_data,
|
||||||
|
)
|
||||||
|
|
||||||
|
return self.async_show_form(
|
||||||
|
step_id="reconfigure",
|
||||||
|
data_schema=vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(CONF_PASSWORD): TextSelector(
|
||||||
|
TextSelectorConfig(
|
||||||
|
type=TextSelectorType.PASSWORD,
|
||||||
|
autocomplete="current-password",
|
||||||
|
)
|
||||||
|
),
|
||||||
|
vol.Required(SECTION_ADVANCED_SETTINGS): section(
|
||||||
|
vol.Schema(
|
||||||
|
{
|
||||||
|
vol.Required(
|
||||||
|
CONF_SSL,
|
||||||
|
default=current_data[SECTION_ADVANCED_SETTINGS][
|
||||||
|
CONF_SSL
|
||||||
|
],
|
||||||
|
): bool,
|
||||||
|
vol.Required(
|
||||||
|
CONF_VERIFY_SSL,
|
||||||
|
default=current_data[SECTION_ADVANCED_SETTINGS][
|
||||||
|
CONF_VERIFY_SSL
|
||||||
|
],
|
||||||
|
): bool,
|
||||||
|
}
|
||||||
|
),
|
||||||
|
{"collapsed": True},
|
||||||
|
),
|
||||||
|
}
|
||||||
|
),
|
||||||
|
errors=self.errors,
|
||||||
)
|
)
|
||||||
|
|||||||
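For reference, a section() in a data-entry-flow schema nests its answers under the section key, so the flow above receives the SSL toggles as a nested mapping rather than as top-level keys. A minimal plain-Python sketch of the resulting user_input, using the literal key strings from const.py (the host and credentials are made-up example values):

# Sketch only: shape of user_input produced by the schema above.
user_input = {
    "host": "192.0.2.10",          # CONF_HOST (example value)
    "username": "ubnt",
    "password": "example-password",
    "advanced_settings": {          # SECTION_ADVANCED_SETTINGS
        "ssl": True,                # CONF_SSL, default DEFAULT_SSL
        "verify_ssl": False,        # CONF_VERIFY_SSL, default DEFAULT_VERIFY_SSL
    },
}

# _validate_and_get_device_info() reads the nested values like this:
use_ssl = user_input["advanced_settings"]["ssl"]
verify_ssl = user_input["advanced_settings"]["verify_ssl"]
print(use_ssl, verify_ssl)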
@@ -7,3 +7,8 @@ DOMAIN = "airos"
 SCAN_INTERVAL = timedelta(minutes=1)

 MANUFACTURER = "Ubiquiti"
+
+DEFAULT_VERIFY_SSL = False
+DEFAULT_SSL = True
+
+SECTION_ADVANCED_SETTINGS = "advanced_settings"
@@ -14,7 +14,7 @@ from airos.exceptions import (

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryError
+from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import DOMAIN, SCAN_INTERVAL
@@ -47,9 +47,9 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
         try:
             await self.airos_device.login()
             return await self.airos_device.status()
-        except (AirOSConnectionAuthenticationError,) as err:
+        except AirOSConnectionAuthenticationError as err:
             _LOGGER.exception("Error authenticating with airOS device")
-            raise ConfigEntryError(
+            raise ConfigEntryAuthFailed(
                 translation_domain=DOMAIN, translation_key="invalid_auth"
             ) from err
         except (
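Switching the coordinator from ConfigEntryError to ConfigEntryAuthFailed matters because Home Assistant treats the latter as the signal to start the reauthentication flow added in the config flow above. A hedged, minimal sketch of the pattern (a generic method body, not the integration's actual coordinator; the client attribute and error type are stand-ins):

from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import UpdateFailed


class ExampleAuthError(Exception):
    """Stand-in for a library authentication error."""


async def _async_update_data(self):
    """Fetch data; escalate auth problems so HA starts a reauth flow."""
    try:
        return await self.client.fetch()  # assumed client attribute
    except ExampleAuthError as err:
        # Marks the config entry as needing reauthentication.
        raise ConfigEntryAuthFailed("invalid credentials") from err
    except OSError as err:
        raise UpdateFailed(f"update failed: {err}") from err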
@@ -2,11 +2,11 @@

 from __future__ import annotations

-from homeassistant.const import CONF_HOST
+from homeassistant.const import CONF_HOST, CONF_SSL
 from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
 from homeassistant.helpers.update_coordinator import CoordinatorEntity

-from .const import DOMAIN, MANUFACTURER
+from .const import DOMAIN, MANUFACTURER, SECTION_ADVANCED_SETTINGS
 from .coordinator import AirOSDataUpdateCoordinator


@@ -20,17 +20,27 @@ class AirOSEntity(CoordinatorEntity[AirOSDataUpdateCoordinator]):
         super().__init__(coordinator)

         airos_data = self.coordinator.data
+        url_schema = (
+            "https"
+            if coordinator.config_entry.data[SECTION_ADVANCED_SETTINGS][CONF_SSL]
+            else "http"
+        )
+
         configuration_url: str | None = (
-            f"https://{coordinator.config_entry.data[CONF_HOST]}"
+            f"{url_schema}://{coordinator.config_entry.data[CONF_HOST]}"
         )

         self._attr_device_info = DeviceInfo(
             connections={(CONNECTION_NETWORK_MAC, airos_data.derived.mac)},
             configuration_url=configuration_url,
-            identifiers={(DOMAIN, str(airos_data.host.device_id))},
+            identifiers={(DOMAIN, airos_data.derived.mac)},
             manufacturer=MANUFACTURER,
             model=airos_data.host.devmodel,
+            model_id=(
+                sku
+                if (sku := airos_data.derived.sku) not in ["UNKNOWN", "AMBIGUOUS"]
+                else None
+            ),
             name=airos_data.host.hostname,
             sw_version=airos_data.host.fwversion,
         )
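The url_schema change above only switches the device link between http and https based on the stored option; a self-contained sketch of the same expression with example entry data (the host value is made up):

entry_data = {"host": "192.0.2.10", "advanced_settings": {"ssl": True}}

url_schema = "https" if entry_data["advanced_settings"]["ssl"] else "http"
configuration_url = f"{url_schema}://{entry_data['host']}"
assert configuration_url == "https://192.0.2.10"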
@@ -4,7 +4,8 @@
   "codeowners": ["@CoMPaTech"],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/airos",
+  "integration_type": "device",
   "iot_class": "local_polling",
-  "quality_scale": "bronze",
-  "requirements": ["airos==0.5.1"]
+  "quality_scale": "silver",
+  "requirements": ["airos==0.5.6"]
 }
@@ -32,11 +32,11 @@ rules:
   config-entry-unloading: done
   docs-configuration-parameters: done
   docs-installation-parameters: done
-  entity-unavailable: todo
+  entity-unavailable: done
   integration-owner: done
-  log-when-unavailable: todo
+  log-when-unavailable: done
-  parallel-updates: todo
+  parallel-updates: done
-  reauthentication-flow: todo
+  reauthentication-flow: done
   test-coverage: done

   # Gold
@@ -48,9 +48,9 @@ rules:
   docs-examples: todo
   docs-known-limitations: done
   docs-supported-devices: done
-  docs-supported-functions: todo
+  docs-supported-functions: done
   docs-troubleshooting: done
-  docs-use-cases: todo
+  docs-use-cases: done
   dynamic-devices: todo
   entity-category: done
   entity-device-class: done
@@ -60,7 +60,7 @@ rules:
   icon-translations:
     status: exempt
     comment: no (custom) icons used or envisioned
-  reconfiguration-flow: todo
+  reconfiguration-flow: done
   repair-issues: todo
   stale-devices: todo
@@ -2,6 +2,35 @@
   "config": {
     "flow_title": "Ubiquiti airOS device",
     "step": {
+      "reauth_confirm": {
+        "data": {
+          "password": "[%key:common::config_flow::data::password%]"
+        },
+        "data_description": {
+          "password": "[%key:component::airos::config::step::user::data_description::password%]"
+        }
+      },
+      "reconfigure": {
+        "data": {
+          "password": "[%key:common::config_flow::data::password%]"
+        },
+        "data_description": {
+          "password": "[%key:component::airos::config::step::user::data_description::password%]"
+        },
+        "sections": {
+          "advanced_settings": {
+            "name": "[%key:component::airos::config::step::user::sections::advanced_settings::name%]",
+            "data": {
+              "ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data::ssl%]",
+              "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
+            },
+            "data_description": {
+              "ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data_description::ssl%]",
+              "verify_ssl": "[%key:component::airos::config::step::user::sections::advanced_settings::data_description::verify_ssl%]"
+            }
+          }
+        }
+      },
       "user": {
         "data": {
           "host": "[%key:common::config_flow::data::host%]",
@@ -12,6 +41,19 @@
           "host": "IP address or hostname of the airOS device",
           "username": "Administrator username for the airOS device, normally 'ubnt'",
           "password": "Password configured through the UISP app or web interface"
+        },
+        "sections": {
+          "advanced_settings": {
+            "name": "Advanced settings",
+            "data": {
+              "ssl": "Use HTTPS",
+              "verify_ssl": "[%key:common::config_flow::data::verify_ssl%]"
+            },
+            "data_description": {
+              "ssl": "Whether the connection should be encrypted (required for most devices)",
+              "verify_ssl": "Whether the certificate should be verified when using HTTPS. This should be off for self-signed certificates"
+            }
+          }
         }
       }
     },
@@ -22,7 +64,10 @@
       "unknown": "[%key:common::config_flow::error::unknown%]"
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
+      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
+      "unique_id_mismatch": "Re-authentication should be used for the same device not a new one"
     }
   },
   "entity": {
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["aioairq"],
-  "requirements": ["aioairq==0.4.6"]
+  "requirements": ["aioairq==0.4.7"]
 }
@@ -29,7 +29,7 @@
       },
       "data_description": {
         "return_average": "air-Q allows to poll both the noisy sensor readings as well as the values averaged on the device (default)",
-        "clip_negatives": "For baseline calibration purposes, certain sensor values may briefly become negative. The default behaviour is to clip such values to 0"
+        "clip_negatives": "For baseline calibration purposes, certain sensor values may briefly become negative. The default behavior is to clip such values to 0"
       }
     }
   }
@@ -23,6 +23,10 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
     }
 )

+URL_API_INTEGRATION = {
+    "url": "https://dashboard.airthings.com/integrations/api-integration"
+}
+

 class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Airthings."""
@@ -37,11 +41,7 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
         return self.async_show_form(
             step_id="user",
             data_schema=STEP_USER_DATA_SCHEMA,
-            description_placeholders={
-                "url": (
-                    "https://dashboard.airthings.com/integrations/api-integration"
-                ),
-            },
+            description_placeholders=URL_API_INTEGRATION,
         )

         errors = {}
@@ -65,5 +65,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             return self.async_create_entry(title="Airthings", data=user_input)

         return self.async_show_form(
-            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
+            step_id="user",
+            data_schema=STEP_USER_DATA_SCHEMA,
+            errors=errors,
+            description_placeholders=URL_API_INTEGRATION,
         )
@@ -4,9 +4,9 @@
       "user": {
         "data": {
           "id": "ID",
-          "secret": "Secret",
-          "description": "Login at {url} to find your credentials"
-        }
+          "secret": "Secret"
+        },
+        "description": "Log in at {url} to find your credentials"
       }
     },
     "error": {
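The Airthings change pairs the shared URL_API_INTEGRATION placeholder dict with the reworked strings.json description; the frontend substitutes the placeholder into the translated text roughly as below (a sketch using Python's own string formatting, not the actual frontend code):

description = "Log in at {url} to find your credentials"
placeholders = {"url": "https://dashboard.airthings.com/integrations/api-integration"}

print(description.format(**placeholders))
# Log in at https://dashboard.airthings.com/integrations/api-integration to find your credentials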
@@ -6,8 +6,13 @@ import dataclasses
 import logging
 from typing import Any

-from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice
+from airthings_ble import (
+    AirthingsBluetoothDeviceData,
+    AirthingsDevice,
+    UnsupportedDeviceError,
+)
 from bleak import BleakError
+from habluetooth import BluetoothServiceInfoBleak
 import voluptuous as vol

 from homeassistant.components import bluetooth
@@ -27,6 +32,7 @@ SERVICE_UUIDS = [
     "b42e4a8e-ade7-11e4-89d3-123b93f75cba",
     "b42e1c08-ade7-11e4-89d3-123b93f75cba",
     "b42e3882-ade7-11e4-89d3-123b93f75cba",
+    "b42e90a2-ade7-11e4-89d3-123b93f75cba",
 ]


@@ -37,6 +43,7 @@ class Discovery:
     name: str
     discovery_info: BluetoothServiceInfo
     device: AirthingsDevice
+    data: AirthingsBluetoothDeviceData


 def get_name(device: AirthingsDevice) -> str:
@@ -44,7 +51,7 @@ def get_name(device: AirthingsDevice) -> str:

     name = device.friendly_name()
     if identifier := device.identifier:
-        name += f" ({identifier})"
+        name += f" ({device.model.value}{identifier})"
     return name


@@ -62,8 +69,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
         self._discovered_device: Discovery | None = None
         self._discovered_devices: dict[str, Discovery] = {}

-    async def _get_device_data(
-        self, discovery_info: BluetoothServiceInfo
+    async def _get_device(
+        self, data: AirthingsBluetoothDeviceData, discovery_info: BluetoothServiceInfo
     ) -> AirthingsDevice:
         ble_device = bluetooth.async_ble_device_from_address(
             self.hass, discovery_info.address
@@ -72,10 +79,8 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             _LOGGER.debug("no ble_device in _get_device_data")
             raise AirthingsDeviceUpdateError("No ble_device")

-        airthings = AirthingsBluetoothDeviceData(_LOGGER)
-
         try:
-            data = await airthings.update_device(ble_device)
+            device = await data.update_device(ble_device)
         except BleakError as err:
             _LOGGER.error(
                 "Error connecting to and getting data from %s: %s",
@@ -83,12 +88,15 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
                 err,
             )
             raise AirthingsDeviceUpdateError("Failed getting device data") from err
+        except UnsupportedDeviceError:
+            _LOGGER.debug("Skipping unsupported device: %s", discovery_info.name)
+            raise
         except Exception as err:
             _LOGGER.error(
                 "Unknown error occurred from %s: %s", discovery_info.address, err
             )
             raise
-        return data
+        return device

     async def async_step_bluetooth(
         self, discovery_info: BluetoothServiceInfo
@@ -98,17 +106,21 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
         await self.async_set_unique_id(discovery_info.address)
         self._abort_if_unique_id_configured()

+        data = AirthingsBluetoothDeviceData(logger=_LOGGER)
+
         try:
-            device = await self._get_device_data(discovery_info)
+            device = await self._get_device(data=data, discovery_info=discovery_info)
         except AirthingsDeviceUpdateError:
             return self.async_abort(reason="cannot_connect")
+        except UnsupportedDeviceError:
+            return self.async_abort(reason="unsupported_device")
         except Exception:
             _LOGGER.exception("Unknown error occurred")
             return self.async_abort(reason="unknown")

         name = get_name(device)
         self.context["title_placeholders"] = {"name": name}
-        self._discovered_device = Discovery(name, discovery_info, device)
+        self._discovered_device = Discovery(name, discovery_info, device, data=data)

         return await self.async_step_bluetooth_confirm()

@@ -117,6 +129,12 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
     ) -> ConfigFlowResult:
         """Confirm discovery."""
         if user_input is not None:
+            if (
+                self._discovered_device is not None
+                and self._discovered_device.device.firmware.need_firmware_upgrade
+            ):
+                return self.async_abort(reason="firmware_upgrade_required")
+
             return self.async_create_entry(
                 title=self.context["title_placeholders"]["name"], data={}
             )
@@ -137,6 +155,9 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             self._abort_if_unique_id_configured()
             discovery = self._discovered_devices[address]

+            if discovery.device.firmware.need_firmware_upgrade:
+                return self.async_abort(reason="firmware_upgrade_required")
+
             self.context["title_placeholders"] = {
                 "name": discovery.name,
             }
@@ -146,32 +167,53 @@ class AirthingsConfigFlow(ConfigFlow, domain=DOMAIN):
             return self.async_create_entry(title=discovery.name, data={})

         current_addresses = self._async_current_ids(include_ignore=False)
+        devices: list[BluetoothServiceInfoBleak] = []
         for discovery_info in async_discovered_service_info(self.hass):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:
                 continue

             if MFCT_ID not in discovery_info.manufacturer_data:
                 continue

             if not any(uuid in SERVICE_UUIDS for uuid in discovery_info.service_uuids):
+                _LOGGER.debug(
+                    "Skipping unsupported device: %s (%s)", discovery_info.name, address
+                )
                 continue
+            devices.append(discovery_info)
+
+        for discovery_info in devices:
+            address = discovery_info.address
+            data = AirthingsBluetoothDeviceData(logger=_LOGGER)
             try:
-                device = await self._get_device_data(discovery_info)
+                device = await self._get_device(data, discovery_info)
             except AirthingsDeviceUpdateError:
-                return self.async_abort(reason="cannot_connect")
+                _LOGGER.error(
+                    "Error connecting to and getting data from %s (%s)",
+                    discovery_info.name,
+                    discovery_info.address,
+                )
+                continue
+            except UnsupportedDeviceError:
+                _LOGGER.debug(
+                    "Skipping unsupported device: %s (%s)",
+                    discovery_info.name,
+                    discovery_info.address,
+                )
+                continue
             except Exception:
                 _LOGGER.exception("Unknown error occurred")
                 return self.async_abort(reason="unknown")
             name = get_name(device)
-            self._discovered_devices[address] = Discovery(name, discovery_info, device)
+            _LOGGER.debug("Discovered Airthings device: %s (%s)", name, address)
+            self._discovered_devices[address] = Discovery(
+                name, discovery_info, device, data
+            )

         if not self._discovered_devices:
             return self.async_abort(reason="no_devices_found")

         titles = {
-            address: discovery.device.name
+            address: get_name(discovery.device)
             for (address, discovery) in self._discovered_devices.items()
         }
         return self.async_show_form(
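The get_name() tweak prefixes the identifier with the model number, so discovered entries are easier to tell apart in the picker. A quick sketch with assumed example values (the real friendly name, model value, and identifier come from the airthings-ble library):

class _Model:
    """Stand-in for the library's model enum member."""
    value = "2930"  # assumed example model number


friendly_name = "Airthings Wave Plus"
identifier = "123456"

name = friendly_name
if identifier:
    name += f" ({_Model.value}{identifier})"
assert name == "Airthings Wave Plus (2930123456)"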
@@ -17,6 +17,10 @@
     {
       "manufacturer_id": 820,
       "service_uuid": "b42e3882-ade7-11e4-89d3-123b93f75cba"
+    },
+    {
+      "manufacturer_id": 820,
+      "service_uuid": "b42e90a2-ade7-11e4-89d3-123b93f75cba"
     }
   ],
   "codeowners": ["@vincegio", "@LaStrada"],
@@ -24,5 +28,5 @@
   "dependencies": ["bluetooth_adapters"],
   "documentation": "https://www.home-assistant.io/integrations/airthings_ble",
   "iot_class": "local_polling",
-  "requirements": ["airthings-ble==0.9.2"]
+  "requirements": ["airthings-ble==1.1.1"]
 }
@@ -16,10 +16,12 @@ from homeassistant.components.sensor import (
 from homeassistant.const import (
     CONCENTRATION_PARTS_PER_BILLION,
     CONCENTRATION_PARTS_PER_MILLION,
+    LIGHT_LUX,
     PERCENTAGE,
     EntityCategory,
     Platform,
     UnitOfPressure,
+    UnitOfSoundPressure,
     UnitOfTemperature,
 )
 from homeassistant.core import HomeAssistant, callback
@@ -112,8 +114,25 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
         state_class=SensorStateClass.MEASUREMENT,
         suggested_display_precision=0,
     ),
+    "lux": SensorEntityDescription(
+        key="lux",
+        device_class=SensorDeviceClass.ILLUMINANCE,
+        native_unit_of_measurement=LIGHT_LUX,
+        state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
+    ),
+    "noise": SensorEntityDescription(
+        key="noise",
+        translation_key="ambient_noise",
+        device_class=SensorDeviceClass.SOUND_PRESSURE,
+        native_unit_of_measurement=UnitOfSoundPressure.WEIGHTED_DECIBEL_A,
+        state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
+    ),
 }

+PARALLEL_UPDATES = 0
+

 @callback
 def async_migrate(hass: HomeAssistant, address: str, sensor_name: str) -> None:
@@ -6,6 +6,9 @@
         "description": "[%key:component::bluetooth::config::step::user::description%]",
         "data": {
           "address": "[%key:common::config_flow::data::device%]"
+        },
+        "data_description": {
+          "address": "The Airthings devices discovered via Bluetooth."
         }
       },
       "bluetooth_confirm": {
@@ -17,6 +20,8 @@
       "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
+      "firmware_upgrade_required": "Your device requires a firmware upgrade. Please use the Airthings app (Android/iOS) to upgrade it.",
+      "unsupported_device": "Unsupported device",
       "unknown": "[%key:common::config_flow::error::unknown%]"
     }
   },
@@ -36,6 +41,9 @@
       },
       "illuminance": {
        "name": "[%key:component::sensor::entity_component::illuminance::name%]"
+      },
+      "ambient_noise": {
+        "name": "Ambient noise"
       }
     }
   }
@@ -2,17 +2,14 @@

 from airtouch4pyapi import AirTouch

-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_HOST, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady

-from .coordinator import AirtouchDataUpdateCoordinator
+from .coordinator import AirTouch4ConfigEntry, AirtouchDataUpdateCoordinator

 PLATFORMS = [Platform.CLIMATE]

-type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]
-

 async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) -> bool:
     """Set up AirTouch4 from a config entry."""
@@ -22,7 +19,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirTouch4ConfigEntry) ->
     info = airtouch.GetAcs()
     if not info:
         raise ConfigEntryNotReady
-    coordinator = AirtouchDataUpdateCoordinator(hass, airtouch)
+    coordinator = AirtouchDataUpdateCoordinator(hass, entry, airtouch)
     await coordinator.async_config_entry_first_refresh()
     entry.runtime_data = coordinator
@@ -2,26 +2,34 @@

 import logging

+from airtouch4pyapi import AirTouch
 from airtouch4pyapi.airtouch import AirTouchStatus

 from homeassistant.components.climate import SCAN_INTERVAL
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

 from .const import DOMAIN

 _LOGGER = logging.getLogger(__name__)

+type AirTouch4ConfigEntry = ConfigEntry[AirtouchDataUpdateCoordinator]
+

 class AirtouchDataUpdateCoordinator(DataUpdateCoordinator):
     """Class to manage fetching Airtouch data."""

-    def __init__(self, hass, airtouch):
+    def __init__(
+        self, hass: HomeAssistant, entry: AirTouch4ConfigEntry, airtouch: AirTouch
+    ) -> None:
         """Initialize global Airtouch data updater."""
         self.airtouch = airtouch

         super().__init__(
             hass,
             _LOGGER,
+            config_entry=entry,
             name=DOMAIN,
             update_interval=SCAN_INTERVAL,
         )
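Moving the AirTouch4ConfigEntry alias next to the coordinator keeps the typed entry usable from both modules, and with it entry.runtime_data is typed as the coordinator. A hedged usage sketch of the same pattern (ExampleConfigEntry and get_coordinator are illustrative names, not part of the integration):

from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

# Same pattern as the airtouch4 change: parameterize ConfigEntry with the
# coordinator type so entry.runtime_data carries that type.
type ExampleConfigEntry = ConfigEntry[DataUpdateCoordinator]


def get_coordinator(entry: ExampleConfigEntry) -> DataUpdateCoordinator:
    """Return the coordinator stored on the entry at setup time."""
    return entry.runtime_data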
@@ -6,17 +6,19 @@ from collections.abc import Callable
 from dataclasses import dataclass
 from typing import Any, Final

-from aioairzone.common import GrilleAngle, OperationMode, SleepTimeout
+from aioairzone.common import GrilleAngle, OperationMode, QAdapt, SleepTimeout
 from aioairzone.const import (
     API_COLD_ANGLE,
     API_HEAT_ANGLE,
     API_MODE,
+    API_Q_ADAPT,
     API_SLEEP,
     AZD_COLD_ANGLE,
     AZD_HEAT_ANGLE,
     AZD_MASTER,
     AZD_MODE,
     AZD_MODES,
+    AZD_Q_ADAPT,
     AZD_SLEEP,
     AZD_ZONES,
 )
@@ -65,6 +67,14 @@ SLEEP_DICT: Final[dict[str, int]] = {
     "90m": SleepTimeout.SLEEP_90,
 }

+Q_ADAPT_DICT: Final[dict[str, int]] = {
+    "standard": QAdapt.STANDARD,
+    "power": QAdapt.POWER,
+    "silence": QAdapt.SILENCE,
+    "minimum": QAdapt.MINIMUM,
+    "maximum": QAdapt.MAXIMUM,
+}
+

 def main_zone_options(
     zone_data: dict[str, Any],
@@ -83,6 +93,14 @@ MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
         options_fn=main_zone_options,
         translation_key="modes",
     ),
+    AirzoneSelectDescription(
+        api_param=API_Q_ADAPT,
+        entity_category=EntityCategory.CONFIG,
+        key=AZD_Q_ADAPT,
+        options=list(Q_ADAPT_DICT),
+        options_dict=Q_ADAPT_DICT,
+        translation_key="q_adapt",
+    ),
 )

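The new select entity maps each translated option string to an aioairzone QAdapt value through options_dict, the same way the existing sleep and mode selects work. A stand-alone sketch of that lookup, using a stand-in enum (the real member values come from aioairzone and are assumed here):

from enum import IntEnum


class QAdapt(IntEnum):
    """Stand-in for aioairzone.common.QAdapt; numeric values assumed."""
    STANDARD = 0
    POWER = 1
    SILENCE = 2
    MINIMUM = 3
    MAXIMUM = 4


Q_ADAPT_DICT = {
    "standard": QAdapt.STANDARD,
    "power": QAdapt.POWER,
    "silence": QAdapt.SILENCE,
    "minimum": QAdapt.MINIMUM,
    "maximum": QAdapt.MAXIMUM,
}

# Selecting "power" in the UI resolves to the API parameter value:
assert Q_ADAPT_DICT["power"] is QAdapt.POWER
print(list(Q_ADAPT_DICT))  # the options shown to the user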
@@ -63,6 +63,16 @@
           "stop": "Stop"
         }
       },
+      "q_adapt": {
+        "name": "Q-Adapt",
+        "state": {
+          "standard": "Standard",
+          "power": "Power",
+          "silence": "Silence",
+          "minimum": "Minimum",
+          "maximum": "Maximum"
+        }
+      },
       "sleep_times": {
         "name": "Sleep",
         "state": {
@@ -22,6 +22,17 @@ class OAuth2FlowHandler(
     VERSION = CONFIG_FLOW_VERSION
     MINOR_VERSION = CONFIG_FLOW_MINOR_VERSION

+    async def async_step_user(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Check we have the cloud integration set up."""
+        if "cloud" not in self.hass.config.components:
+            return self.async_abort(
+                reason="cloud_not_enabled",
+                description_placeholders={"default_config": "default_config"},
+            )
+        return await super().async_step_user(user_input)
+
     async def async_step_reauth(
         self, user_input: Mapping[str, Any]
     ) -> ConfigFlowResult:
@@ -24,7 +24,8 @@
       "no_url_available": "[%key:common::config_flow::abort::oauth2_no_url_available%]",
       "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]",
       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
-      "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account."
+      "wrong_account": "You are authenticated with a different account than the one set up. Please authenticate with the configured account.",
+      "cloud_not_enabled": "Please make sure you run Home Assistant with `{default_config}` enabled in your configuration.yaml."
     },
     "create_entry": {
       "default": "[%key:common::config_flow::create_entry::authenticated%]"
@@ -2,10 +2,9 @@

 from __future__ import annotations

-import asyncio
 from datetime import timedelta
 import logging
-from typing import TYPE_CHECKING, Any, Final, final
+from typing import Any, Final, final

 from propcache.api import cached_property
 import voluptuous as vol
@@ -28,8 +27,6 @@ from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.config_validation import make_entity_service_schema
 from homeassistant.helpers.entity import Entity, EntityDescription
 from homeassistant.helpers.entity_component import EntityComponent
-from homeassistant.helpers.entity_platform import EntityPlatform
-from homeassistant.helpers.frame import ReportBehavior, report_usage
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.util.hass_dict import HassKey

@@ -149,68 +146,11 @@ class AlarmControlPanelEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_A
     )
     _alarm_control_panel_option_default_code: str | None = None

-    __alarm_legacy_state: bool = False
-
-    def __init_subclass__(cls, **kwargs: Any) -> None:
-        """Post initialisation processing."""
-        super().__init_subclass__(**kwargs)
-        if any(method in cls.__dict__ for method in ("_attr_state", "state")):
-            # Integrations should use the 'alarm_state' property instead of
-            # setting the state directly.
-            cls.__alarm_legacy_state = True
-
-    def __setattr__(self, name: str, value: Any, /) -> None:
-        """Set attribute.
-
-        Deprecation warning if setting '_attr_state' directly
-        unless already reported.
-        """
-        if name == "_attr_state":
-            self._report_deprecated_alarm_state_handling()
-        return super().__setattr__(name, value)
-
-    @callback
-    def add_to_platform_start(
-        self,
-        hass: HomeAssistant,
-        platform: EntityPlatform,
-        parallel_updates: asyncio.Semaphore | None,
-    ) -> None:
-        """Start adding an entity to a platform."""
-        super().add_to_platform_start(hass, platform, parallel_updates)
-        if self.__alarm_legacy_state:
-            self._report_deprecated_alarm_state_handling()
-
-    @callback
-    def _report_deprecated_alarm_state_handling(self) -> None:
-        """Report on deprecated handling of alarm state.
-
-        Integrations should implement alarm_state instead of using state directly.
-        """
-        report_usage(
-            "is setting state directly."
-            f" Entity {self.entity_id} ({type(self)}) should implement the 'alarm_state'"
-            " property and return its state using the AlarmControlPanelState enum",
-            core_integration_behavior=ReportBehavior.ERROR,
-            custom_integration_behavior=ReportBehavior.LOG,
-            breaks_in_ha_version="2025.11",
-            integration_domain=self.platform.platform_name if self.platform else None,
-            exclude_integrations={DOMAIN},
-        )
-
     @final
     @property
     def state(self) -> str | None:
         """Return the current state."""
-        if (alarm_state := self.alarm_state) is not None:
-            return alarm_state
-        if self._attr_state is not None:
-            # Backwards compatibility for integrations that set state directly
-            # Should be removed in 2025.11
-            if TYPE_CHECKING:
-                assert isinstance(self._attr_state, str)
-            return self._attr_state
-        return None
+        return self.alarm_state

     @cached_property
     def alarm_state(self) -> AlarmControlPanelState | None:
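With the compatibility shim removed, state is now derived solely from alarm_state, which is what the removed deprecation message had been steering integrations toward. A minimal sketch of an entity written the supported way (the class, its _armed attribute, and the chosen states are illustrative, not from any real integration):

from homeassistant.components.alarm_control_panel import (
    AlarmControlPanelEntity,
    AlarmControlPanelState,
)


class ExampleAlarm(AlarmControlPanelEntity):
    """Sketch: report state through alarm_state, never _attr_state."""

    _armed = False

    @property
    def alarm_state(self) -> AlarmControlPanelState | None:
        return (
            AlarmControlPanelState.ARMED_AWAY
            if self._armed
            else AlarmControlPanelState.DISARMED
        )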
@@ -1472,10 +1472,10 @@ class AlexaModeController(AlexaCapability):
             # Return state instead of position when using ModeController.
             mode = self.entity.state
             if mode in (
-                cover.STATE_OPEN,
-                cover.STATE_OPENING,
-                cover.STATE_CLOSED,
-                cover.STATE_CLOSING,
+                cover.CoverState.OPEN,
+                cover.CoverState.OPENING,
+                cover.CoverState.CLOSED,
+                cover.CoverState.CLOSING,
                 STATE_UNKNOWN,
             ):
                 return f"{cover.ATTR_POSITION}.{mode}"
@@ -1594,11 +1594,11 @@ class AlexaModeController(AlexaCapability):
                 ["Position", AlexaGlobalCatalog.SETTING_OPENING], False
             )
             self._resource.add_mode(
-                f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}",
+                f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}",
                 [AlexaGlobalCatalog.VALUE_OPEN],
             )
             self._resource.add_mode(
-                f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}",
+                f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}",
                 [AlexaGlobalCatalog.VALUE_CLOSE],
             )
             self._resource.add_mode(
@@ -1651,22 +1651,22 @@ class AlexaModeController(AlexaCapability):
                 raise_labels.append(AlexaSemantics.ACTION_OPEN)
             self._semantics.add_states_to_value(
                 [AlexaSemantics.STATES_CLOSED],
-                f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}",
+                f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}",
             )
             self._semantics.add_states_to_value(
                 [AlexaSemantics.STATES_OPEN],
-                f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}",
+                f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}",
             )

             self._semantics.add_action_to_directive(
                 lower_labels,
                 "SetMode",
-                {"mode": f"{cover.ATTR_POSITION}.{cover.STATE_CLOSED}"},
+                {"mode": f"{cover.ATTR_POSITION}.{cover.CoverState.CLOSED}"},
             )
             self._semantics.add_action_to_directive(
                 raise_labels,
                 "SetMode",
-                {"mode": f"{cover.ATTR_POSITION}.{cover.STATE_OPEN}"},
+                {"mode": f"{cover.ATTR_POSITION}.{cover.CoverState.OPEN}"},
             )

             return self._semantics.serialize_semantics()
@@ -1261,9 +1261,9 @@ async def async_api_set_mode(
     elif instance == f"{cover.DOMAIN}.{cover.ATTR_POSITION}":
         position = mode.split(".")[1]

-        if position == cover.STATE_CLOSED:
+        if position == cover.CoverState.CLOSED:
             service = cover.SERVICE_CLOSE_COVER
-        elif position == cover.STATE_OPEN:
+        elif position == cover.CoverState.OPEN:
             service = cover.SERVICE_OPEN_COVER
         elif position == "custom":
             service = cover.SERVICE_STOP_COVER
@@ -10,6 +10,7 @@ from aioamazondevices.api import AmazonDevice
|
|||||||
from aioamazondevices.const import SENSOR_STATE_OFF
|
from aioamazondevices.const import SENSOR_STATE_OFF
|
||||||
|
|
||||||
from homeassistant.components.binary_sensor import (
|
from homeassistant.components.binary_sensor import (
|
||||||
|
DOMAIN as BINARY_SENSOR_DOMAIN,
|
||||||
BinarySensorDeviceClass,
|
BinarySensorDeviceClass,
|
||||||
BinarySensorEntity,
|
BinarySensorEntity,
|
||||||
BinarySensorEntityDescription,
|
BinarySensorEntityDescription,
|
||||||
@@ -17,9 +18,12 @@ from homeassistant.components.binary_sensor import (
|
|||||||
from homeassistant.const import EntityCategory
|
from homeassistant.const import EntityCategory
|
||||||
from homeassistant.core import HomeAssistant
|
from homeassistant.core import HomeAssistant
|
||||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||||
|
import homeassistant.helpers.entity_registry as er
|
||||||
|
|
||||||
|
from .const import _LOGGER, DOMAIN
|
||||||
from .coordinator import AmazonConfigEntry
|
from .coordinator import AmazonConfigEntry
|
||||||
from .entity import AmazonEntity
|
from .entity import AmazonEntity
|
||||||
|
from .utils import async_update_unique_id
|
||||||
|
|
||||||
# Coordinator is used to centralize the data updates
|
# Coordinator is used to centralize the data updates
|
||||||
PARALLEL_UPDATES = 0
|
PARALLEL_UPDATES = 0
|
||||||
@@ -31,6 +35,7 @@ class AmazonBinarySensorEntityDescription(BinarySensorEntityDescription):
|
|||||||
|
|
||||||
is_on_fn: Callable[[AmazonDevice, str], bool]
|
is_on_fn: Callable[[AmazonDevice, str], bool]
|
||||||
is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True
|
is_supported: Callable[[AmazonDevice, str], bool] = lambda device, key: True
|
||||||
|
is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: True
|
||||||
|
|
||||||
|
|
||||||
BINARY_SENSORS: Final = (
|
BINARY_SENSORS: Final = (
|
||||||
@@ -40,47 +45,52 @@ BINARY_SENSORS: Final = (
|
|||||||
entity_category=EntityCategory.DIAGNOSTIC,
|
entity_category=EntityCategory.DIAGNOSTIC,
|
||||||
is_on_fn=lambda device, _: device.online,
|
is_on_fn=lambda device, _: device.online,
|
||||||
),
|
),
|
||||||
|
+    AmazonBinarySensorEntityDescription(
+        key="detectionState",
+        device_class=BinarySensorDeviceClass.MOTION,
+        is_on_fn=lambda device, key: bool(
+            device.sensors[key].value != SENSOR_STATE_OFF
+        ),
+        is_supported=lambda device, key: device.sensors.get(key) is not None,
+        is_available_fn=lambda device, key: (
+            device.online
+            and (sensor := device.sensors.get(key)) is not None
+            and sensor.error is False
+        ),
+    ),
+)
+
+DEPRECATED_BINARY_SENSORS: Final = (
     AmazonBinarySensorEntityDescription(
         key="bluetooth",
         entity_category=EntityCategory.DIAGNOSTIC,
         translation_key="bluetooth",
-        is_on_fn=lambda device, _: device.bluetooth_state,
+        is_on_fn=lambda device, key: False,
     ),
     AmazonBinarySensorEntityDescription(
         key="babyCryDetectionState",
         translation_key="baby_cry_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
+        is_on_fn=lambda device, key: False,
     ),
     AmazonBinarySensorEntityDescription(
         key="beepingApplianceDetectionState",
         translation_key="beeping_appliance_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
+        is_on_fn=lambda device, key: False,
     ),
     AmazonBinarySensorEntityDescription(
         key="coughDetectionState",
         translation_key="cough_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
+        is_on_fn=lambda device, key: False,
     ),
     AmazonBinarySensorEntityDescription(
         key="dogBarkDetectionState",
         translation_key="dog_bark_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
-    ),
-    AmazonBinarySensorEntityDescription(
-        key="humanPresenceDetectionState",
-        device_class=BinarySensorDeviceClass.MOTION,
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
+        is_on_fn=lambda device, key: False,
     ),
     AmazonBinarySensorEntityDescription(
         key="waterSoundsDetectionState",
         translation_key="water_sounds_detection",
-        is_on_fn=lambda device, key: (device.sensors[key].value != SENSOR_STATE_OFF),
-        is_supported=lambda device, key: device.sensors.get(key) is not None,
+        is_on_fn=lambda device, key: False,
     ),
 )
@@ -94,13 +104,46 @@ async def async_setup_entry(

     coordinator = entry.runtime_data

-    async_add_entities(
-        AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
-        for sensor_desc in BINARY_SENSORS
-        for serial_num in coordinator.data
-        if sensor_desc.is_supported(coordinator.data[serial_num], sensor_desc.key)
+    entity_registry = er.async_get(hass)
+
+    # Replace unique id for "detectionState" binary sensor
+    await async_update_unique_id(
+        hass,
+        coordinator,
+        BINARY_SENSOR_DOMAIN,
+        "humanPresenceDetectionState",
+        "detectionState",
     )

+    # Clean up deprecated sensors
+    for sensor_desc in DEPRECATED_BINARY_SENSORS:
+        for serial_num in coordinator.data:
+            unique_id = f"{serial_num}-{sensor_desc.key}"
+            if entity_id := entity_registry.async_get_entity_id(
+                BINARY_SENSOR_DOMAIN, DOMAIN, unique_id
+            ):
+                _LOGGER.debug("Removing deprecated entity %s", entity_id)
+                entity_registry.async_remove(entity_id)
+
+    known_devices: set[str] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data)
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                AmazonBinarySensorEntity(coordinator, serial_num, sensor_desc)
+                for sensor_desc in BINARY_SENSORS
+                for serial_num in new_devices
+                if sensor_desc.is_supported(
+                    coordinator.data[serial_num], sensor_desc.key
+                )
+            )
+
+    _check_device()
+    entry.async_on_unload(coordinator.async_add_listener(_check_device))
+

 class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
     """Binary sensor device."""
@@ -113,3 +156,13 @@ class AmazonBinarySensorEntity(AmazonEntity, BinarySensorEntity):
         return self.entity_description.is_on_fn(
             self.device, self.entity_description.key
         )
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return (
+            self.entity_description.is_available_fn(
+                self.device, self.entity_description.key
+            )
+            and super().available
+        )
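The `_check_device` helper added above (and repeated in the notify, sensor, and switch platforms further down) is the standard dynamic-device pattern: remember which serial numbers already have entities and add entities only for the difference whenever the coordinator refreshes. A rough standalone sketch of the idea, using a toy coordinator rather than Home Assistant's real `DataUpdateCoordinator`:

```python
# Minimal sketch of the dynamic-device pattern shown in the diff above.
# FakeCoordinator and add_entities are illustrative stand-ins, not the
# integration's real classes.
from collections.abc import Callable


class FakeCoordinator:
    """Holds per-device data and notifies listeners when it changes."""

    def __init__(self) -> None:
        self.data: dict[str, dict] = {}
        self._listeners: list[Callable[[], None]] = []

    def async_add_listener(self, listener: Callable[[], None]) -> Callable[[], None]:
        self._listeners.append(listener)
        return lambda: self._listeners.remove(listener)

    def set_data(self, data: dict[str, dict]) -> None:
        self.data = data
        for listener in list(self._listeners):
            listener()


def setup(coordinator: FakeCoordinator, add_entities: Callable[[list[str]], None]) -> None:
    """Add entities for devices present now and for any device seen later."""
    known_devices: set[str] = set()

    def _check_device() -> None:
        current_devices = set(coordinator.data)
        new_devices = current_devices - known_devices
        if new_devices:
            known_devices.update(new_devices)
            add_entities(sorted(new_devices))

    _check_device()  # devices already known at setup time
    coordinator.async_add_listener(_check_device)  # devices discovered later


coordinator = FakeCoordinator()
coordinator.set_data({"serial-1": {}})
setup(coordinator, lambda serials: print("adding entities for", serials))
coordinator.set_data({"serial-1": {}, "serial-2": {}})  # only serial-2 is added
```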
@@ -64,7 +64,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 data = await validate_input(self.hass, user_input)
             except CannotConnect:
                 errors["base"] = "cannot_connect"
-            except (CannotAuthenticate, TypeError):
+            except CannotAuthenticate:
                 errors["base"] = "invalid_auth"
             except CannotRetrieveData:
                 errors["base"] = "cannot_retrieve_data"
@@ -112,7 +112,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
                 )
             except CannotConnect:
                 errors["base"] = "cannot_connect"
-            except (CannotAuthenticate, TypeError):
+            except CannotAuthenticate:
                 errors["base"] = "invalid_auth"
             except CannotRetrieveData:
                 errors["base"] = "cannot_retrieve_data"
@@ -68,7 +68,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
                 translation_key="cannot_retrieve_data_with_error",
                 translation_placeholders={"error": repr(err)},
             ) from err
-        except (CannotAuthenticate, TypeError) as err:
+        except CannotAuthenticate as err:
             raise ConfigEntryAuthFailed(
                 translation_domain=DOMAIN,
                 translation_key="invalid_auth",
@@ -60,7 +60,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
         "online": device.online,
         "serial number": device.serial_number,
         "software version": device.software_version,
-        "do not disturb": device.do_not_disturb,
-        "response style": device.response_style,
-        "bluetooth state": device.bluetooth_state,
+        "sensors": device.sensors,
     }
@@ -1,44 +1,4 @@
 {
-  "entity": {
-    "binary_sensor": {
-      "bluetooth": {
-        "default": "mdi:bluetooth-off",
-        "state": {
-          "on": "mdi:bluetooth"
-        }
-      },
-      "baby_cry_detection": {
-        "default": "mdi:account-voice-off",
-        "state": {
-          "on": "mdi:account-voice"
-        }
-      },
-      "beeping_appliance_detection": {
-        "default": "mdi:bell-off",
-        "state": {
-          "on": "mdi:bell-ring"
-        }
-      },
-      "cough_detection": {
-        "default": "mdi:blur-off",
-        "state": {
-          "on": "mdi:blur"
-        }
-      },
-      "dog_bark_detection": {
-        "default": "mdi:dog-side-off",
-        "state": {
-          "on": "mdi:dog-side"
-        }
-      },
-      "water_sounds_detection": {
-        "default": "mdi:water-pump-off",
-        "state": {
-          "on": "mdi:water-pump"
-        }
-      }
-    }
-  },
   "services": {
     "send_sound": {
       "service": "mdi:cast-audio"
@@ -7,6 +7,6 @@
   "integration_type": "hub",
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
-  "quality_scale": "silver",
-  "requirements": ["aioamazondevices==6.0.0"]
+  "quality_scale": "platinum",
+  "requirements": ["aioamazondevices==6.4.6"]
 }
@@ -57,13 +57,23 @@ async def async_setup_entry(

     coordinator = entry.runtime_data

-    async_add_entities(
-        AmazonNotifyEntity(coordinator, serial_num, sensor_desc)
-        for sensor_desc in NOTIFY
-        for serial_num in coordinator.data
-        if sensor_desc.subkey in coordinator.data[serial_num].capabilities
-        and sensor_desc.is_supported(coordinator.data[serial_num])
-    )
+    known_devices: set[str] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data)
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                AmazonNotifyEntity(coordinator, serial_num, sensor_desc)
+                for sensor_desc in NOTIFY
+                for serial_num in new_devices
+                if sensor_desc.subkey in coordinator.data[serial_num].capabilities
+                and sensor_desc.is_supported(coordinator.data[serial_num])
+            )
+
+    _check_device()
+    entry.async_on_unload(coordinator.async_add_listener(_check_device))


 class AmazonNotifyEntity(AmazonEntity, NotifyEntity):
@@ -53,7 +53,7 @@ rules:
   docs-supported-functions: done
   docs-troubleshooting: done
   docs-use-cases: done
-  dynamic-devices: todo
+  dynamic-devices: done
   entity-category: done
   entity-device-class: done
   entity-disabled-by-default: done
@@ -31,15 +31,20 @@ class AmazonSensorEntityDescription(SensorEntityDescription):
     """Amazon Devices sensor entity description."""

     native_unit_of_measurement_fn: Callable[[AmazonDevice, str], str] | None = None
+    is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
+        device.online
+        and (sensor := device.sensors.get(key)) is not None
+        and sensor.error is False
+    )


 SENSORS: Final = (
     AmazonSensorEntityDescription(
         key="temperature",
         device_class=SensorDeviceClass.TEMPERATURE,
-        native_unit_of_measurement_fn=lambda device, _key: (
+        native_unit_of_measurement_fn=lambda device, key: (
             UnitOfTemperature.CELSIUS
-            if device.sensors[_key].scale == "CELSIUS"
+            if key in device.sensors and device.sensors[key].scale == "CELSIUS"
             else UnitOfTemperature.FAHRENHEIT
         ),
         state_class=SensorStateClass.MEASUREMENT,
@@ -62,12 +67,22 @@ async def async_setup_entry(

     coordinator = entry.runtime_data

-    async_add_entities(
-        AmazonSensorEntity(coordinator, serial_num, sensor_desc)
-        for sensor_desc in SENSORS
-        for serial_num in coordinator.data
-        if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None
-    )
+    known_devices: set[str] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data)
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                AmazonSensorEntity(coordinator, serial_num, sensor_desc)
+                for sensor_desc in SENSORS
+                for serial_num in new_devices
+                if coordinator.data[serial_num].sensors.get(sensor_desc.key) is not None
+            )
+
+    _check_device()
+    entry.async_on_unload(coordinator.async_add_listener(_check_device))


 class AmazonSensorEntity(AmazonEntity, SensorEntity):
@@ -89,3 +104,13 @@ class AmazonSensorEntity(AmazonEntity, SensorEntity):
     def native_value(self) -> StateType:
         """Return the state of the sensor."""
         return self.device.sensors[self.entity_description.key].value
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return (
+            self.entity_description.is_available_fn(
+                self.device, self.entity_description.key
+            )
+            and super().available
+        )
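The `is_available_fn` default introduced above gates availability on three conditions: the device is online, the backing sensor exists in `device.sensors`, and that sensor has not reported an error. A minimal illustration with made-up dataclasses (not the aioamazondevices models):

```python
# Sketch of the default is_available_fn used in the diff above.  Device and
# SensorValue here are illustrative stand-ins for the library's data classes.
from dataclasses import dataclass, field


@dataclass
class SensorValue:
    value: str
    error: bool = False


@dataclass
class Device:
    online: bool
    sensors: dict[str, SensorValue] = field(default_factory=dict)


def is_available(device: Device, key: str) -> bool:
    return (
        device.online
        and (sensor := device.sensors.get(key)) is not None
        and sensor.error is False
    )


device = Device(online=True, sensors={"temperature": SensorValue("21.5")})
print(is_available(device, "temperature"))  # True
print(is_available(device, "dnd"))           # False: no such sensor on this device
device.sensors["temperature"].error = True
print(is_available(device, "temperature"))  # False: sensor reported an error
```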
@@ -58,26 +58,6 @@
       }
     },
   "entity": {
-    "binary_sensor": {
-      "bluetooth": {
-        "name": "Bluetooth"
-      },
-      "baby_cry_detection": {
-        "name": "Baby crying"
-      },
-      "beeping_appliance_detection": {
-        "name": "Beeping appliance"
-      },
-      "cough_detection": {
-        "name": "Coughing"
-      },
-      "dog_bark_detection": {
-        "name": "Dog barking"
-      },
-      "water_sounds_detection": {
-        "name": "Water sounds"
-      }
-    },
     "notify": {
       "speak": {
         "name": "Speak"
@@ -8,13 +8,21 @@ from typing import TYPE_CHECKING, Any, Final

 from aioamazondevices.api import AmazonDevice

-from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
+from homeassistant.components.switch import (
+    DOMAIN as SWITCH_DOMAIN,
+    SwitchEntity,
+    SwitchEntityDescription,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

 from .coordinator import AmazonConfigEntry
 from .entity import AmazonEntity
-from .utils import alexa_api_call
+from .utils import (
+    alexa_api_call,
+    async_remove_dnd_from_virtual_group,
+    async_update_unique_id,
+)

 PARALLEL_UPDATES = 1
@@ -24,16 +32,19 @@ class AmazonSwitchEntityDescription(SwitchEntityDescription):
     """Alexa Devices switch entity description."""

     is_on_fn: Callable[[AmazonDevice], bool]
-    subkey: str
+    is_available_fn: Callable[[AmazonDevice, str], bool] = lambda device, key: (
+        device.online
+        and (sensor := device.sensors.get(key)) is not None
+        and sensor.error is False
+    )
     method: str


 SWITCHES: Final = (
     AmazonSwitchEntityDescription(
-        key="do_not_disturb",
-        subkey="AUDIO_PLAYER",
+        key="dnd",
         translation_key="do_not_disturb",
-        is_on_fn=lambda _device: _device.do_not_disturb,
+        is_on_fn=lambda device: bool(device.sensors["dnd"].value),
         method="set_do_not_disturb",
     ),
 )
@@ -48,13 +59,31 @@ async def async_setup_entry(

     coordinator = entry.runtime_data

-    async_add_entities(
-        AmazonSwitchEntity(coordinator, serial_num, switch_desc)
-        for switch_desc in SWITCHES
-        for serial_num in coordinator.data
-        if switch_desc.subkey in coordinator.data[serial_num].capabilities
+    # Replace unique id for "DND" switch and remove from Speaker Group
+    await async_update_unique_id(
+        hass, coordinator, SWITCH_DOMAIN, "do_not_disturb", "dnd"
     )

+    # Remove DND switch from virtual groups
+    await async_remove_dnd_from_virtual_group(hass, coordinator)
+
+    known_devices: set[str] = set()
+
+    def _check_device() -> None:
+        current_devices = set(coordinator.data)
+        new_devices = current_devices - known_devices
+        if new_devices:
+            known_devices.update(new_devices)
+            async_add_entities(
+                AmazonSwitchEntity(coordinator, serial_num, switch_desc)
+                for switch_desc in SWITCHES
+                for serial_num in new_devices
+                if switch_desc.key in coordinator.data[serial_num].sensors
+            )
+
+    _check_device()
+    entry.async_on_unload(coordinator.async_add_listener(_check_device))


 class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
     """Switch device."""
@@ -84,3 +113,13 @@ class AmazonSwitchEntity(AmazonEntity, SwitchEntity):
     def is_on(self) -> bool:
         """Return True if switch is on."""
         return self.entity_description.is_on_fn(self.device)
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return (
+            self.entity_description.is_available_fn(
+                self.device, self.entity_description.key
+            )
+            and super().available
+        )
@@ -4,11 +4,16 @@ from collections.abc import Awaitable, Callable, Coroutine
 from functools import wraps
 from typing import Any, Concatenate

+from aioamazondevices.const import SPEAKER_GROUP_FAMILY
 from aioamazondevices.exceptions import CannotConnect, CannotRetrieveData

+from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
+from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import HomeAssistantError
+import homeassistant.helpers.entity_registry as er

-from .const import DOMAIN
+from .const import _LOGGER, DOMAIN
+from .coordinator import AmazonDevicesCoordinator
 from .entity import AmazonEntity
@@ -38,3 +43,41 @@ def alexa_api_call[_T: AmazonEntity, **_P](
             ) from err

     return cmd_wrapper
+
+
+async def async_update_unique_id(
+    hass: HomeAssistant,
+    coordinator: AmazonDevicesCoordinator,
+    domain: str,
+    old_key: str,
+    new_key: str,
+) -> None:
+    """Update unique id for entities created with old format."""
+    entity_registry = er.async_get(hass)
+
+    for serial_num in coordinator.data:
+        unique_id = f"{serial_num}-{old_key}"
+        if entity_id := entity_registry.async_get_entity_id(domain, DOMAIN, unique_id):
+            _LOGGER.debug("Updating unique_id for %s", entity_id)
+            new_unique_id = unique_id.replace(old_key, new_key)
+
+            # Update the registry with the new unique_id
+            entity_registry.async_update_entity(entity_id, new_unique_id=new_unique_id)
+
+
+async def async_remove_dnd_from_virtual_group(
+    hass: HomeAssistant,
+    coordinator: AmazonDevicesCoordinator,
+) -> None:
+    """Remove entity DND from virtual group."""
+    entity_registry = er.async_get(hass)
+
+    for serial_num in coordinator.data:
+        unique_id = f"{serial_num}-do_not_disturb"
+        entity_id = entity_registry.async_get_entity_id(
+            DOMAIN, SWITCH_DOMAIN, unique_id
+        )
+        is_group = coordinator.data[serial_num].device_family == SPEAKER_GROUP_FAMILY
+        if entity_id and is_group:
+            entity_registry.async_remove(entity_id)
+            _LOGGER.debug("Removed DND switch from virtual group %s", entity_id)
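The `async_update_unique_id` helper above follows the usual unique_id migration recipe: look an entity up by its old unique_id and rewrite it to the new key. A toy version of just the rename step, using a plain dict in place of the real entity registry:

```python
# Toy illustration of the unique_id rename performed by async_update_unique_id
# above.  The "registry" here is just a dict mapping entity_id -> unique_id;
# the real helper goes through homeassistant.helpers.entity_registry instead.
def migrate_unique_ids(
    registry: dict[str, str],
    serial_numbers: list[str],
    old_key: str,
    new_key: str,
) -> dict[str, str]:
    migrated = dict(registry)
    for serial_num in serial_numbers:
        old_unique_id = f"{serial_num}-{old_key}"
        for entity_id, unique_id in registry.items():
            if unique_id == old_unique_id:
                # Same serial number prefix, new sensor key suffix
                migrated[entity_id] = unique_id.replace(old_key, new_key)
    return migrated


registry = {"binary_sensor.echo_motion": "G0911-humanPresenceDetectionState"}
print(
    migrate_unique_ids(
        registry, ["G0911"], "humanPresenceDetectionState", "detectionState"
    )
)
# {'binary_sensor.echo_motion': 'G0911-detectionState'}
```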
@@ -65,6 +65,31 @@ SENSOR_DESCRIPTIONS = [
         suggested_display_precision=2,
         translation_placeholders={"sensor_name": "BME280"},
     ),
+    AltruistSensorEntityDescription(
+        device_class=SensorDeviceClass.HUMIDITY,
+        key="BME680_humidity",
+        translation_key="humidity",
+        native_unit_of_measurement=PERCENTAGE,
+        suggested_display_precision=2,
+        translation_placeholders={"sensor_name": "BME680"},
+    ),
+    AltruistSensorEntityDescription(
+        device_class=SensorDeviceClass.PRESSURE,
+        key="BME680_pressure",
+        translation_key="pressure",
+        native_unit_of_measurement=UnitOfPressure.PA,
+        suggested_unit_of_measurement=UnitOfPressure.MMHG,
+        suggested_display_precision=0,
+        translation_placeholders={"sensor_name": "BME680"},
+    ),
+    AltruistSensorEntityDescription(
+        device_class=SensorDeviceClass.TEMPERATURE,
+        key="BME680_temperature",
+        translation_key="temperature",
+        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
+        suggested_display_precision=2,
+        translation_placeholders={"sensor_name": "BME680"},
+    ),
     AltruistSensorEntityDescription(
         device_class=SensorDeviceClass.PRESSURE,
         key="BMP_pressure",
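In the BME680 pressure description above, `native_unit_of_measurement=UnitOfPressure.PA` together with `suggested_unit_of_measurement=UnitOfPressure.MMHG` and `suggested_display_precision=0` means the value is reported in pascal but displayed as whole millimetres of mercury. A quick sketch of the conversion involved, assuming the standard factor of 133.322 Pa per mmHg:

```python
# Sketch of the Pa -> mmHg display conversion implied by the entity
# description above (1 mmHg ≈ 133.322 Pa; precision 0 rounds to whole units).
PA_PER_MMHG = 133.322


def pa_to_display_mmhg(pressure_pa: float, precision: int = 0) -> float:
    return round(pressure_pa / PA_PER_MMHG, precision)


print(pa_to_display_mmhg(101_325))  # 760.0, one standard atmosphere
```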
@@ -39,7 +39,7 @@ from homeassistant.helpers.hassio import is_hassio
 from homeassistant.helpers.singleton import singleton
 from homeassistant.helpers.storage import Store
 from homeassistant.helpers.system_info import async_get_system_info
-from homeassistant.helpers.typing import UNDEFINED, UndefinedType
+from homeassistant.helpers.typing import UNDEFINED
 from homeassistant.loader import (
     Integration,
     IntegrationNotFound,
@@ -142,7 +142,6 @@ class EntityAnalyticsModifications:
     """

     remove: bool = False
-    capabilities: dict[str, Any] | None | UndefinedType = UNDEFINED


 class AnalyticsPlatformProtocol(Protocol):
@@ -514,6 +513,8 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
     integration_inputs: dict[str, tuple[list[str], list[str]]] = {}
     integration_configs: dict[str, AnalyticsModifications] = {}

+    removed_devices: set[str] = set()
+
     # Get device list
     for device_entry in dev_reg.devices.values():
         if not device_entry.primary_config_entry:
@@ -526,6 +527,10 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
         if config_entry is None:
             continue

+        if device_entry.entry_type is dr.DeviceEntryType.SERVICE:
+            removed_devices.add(device_entry.id)
+            continue
+
         integration_domain = config_entry.domain

         integration_input = integration_inputs.setdefault(integration_domain, ([], []))
@@ -538,6 +543,23 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
         integration_input = integration_inputs.setdefault(integration_domain, ([], []))
         integration_input[1].append(entity_entry.entity_id)

+    integrations = {
+        domain: integration
+        for domain, integration in (
+            await async_get_integrations(hass, integration_inputs.keys())
+        ).items()
+        if isinstance(integration, Integration)
+    }
+
+    # Filter out custom integrations and integrations that are not device or hub type
+    integration_inputs = {
+        domain: integration_info
+        for domain, integration_info in integration_inputs.items()
+        if (integration := integrations.get(domain)) is not None
+        and integration.is_built_in
+        and integration.manifest.get("integration_type") in ("device", "hub")
+    }
+
     # Call integrations that implement the analytics platform
     for integration_domain, integration_input in integration_inputs.items():
         if (
@@ -598,15 +620,15 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
             device_config = integration_config.devices.get(device_id, device_config)

             if device_config.remove:
+                removed_devices.add(device_id)
                 continue

             device_entry = dev_reg.devices[device_id]

-            device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
+            device_id_mapping[device_id] = (integration_domain, len(devices_info))

             devices_info.append(
                 {
-                    "entities": [],
                     "entry_type": device_entry.entry_type,
                     "has_configuration_url": device_entry.configuration_url is not None,
                     "hw_version": device_entry.hw_version,
@@ -615,6 +637,7 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901
                     "model_id": device_entry.model_id,
                     "sw_version": device_entry.sw_version,
                     "via_device": device_entry.via_device_id,
+                    "entities": [],
                 }
             )

@@ -653,57 +676,40 @@ async def async_devices_payload(hass: HomeAssistant) -> dict: # noqa: C901

             entity_entry = ent_reg.entities[entity_id]

-            entity_state = hass.states.get(entity_entry.entity_id)
+            entity_state = hass.states.get(entity_id)

             entity_info = {
                 # LIMITATION: `assumed_state` can be overridden by users;
                 # we should replace it with the original value in the future.
                 # It is also not present, if entity is not in the state machine,
                 # which can happen for disabled entities.
-                "assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
-                if entity_state is not None
-                else None,
-                "capabilities": entity_config.capabilities
-                if entity_config.capabilities is not UNDEFINED
-                else entity_entry.capabilities,
+                "assumed_state": (
+                    entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
+                    if entity_state is not None
+                    else None
+                ),
                 "domain": entity_entry.domain,
                 "entity_category": entity_entry.entity_category,
                 "has_entity_name": entity_entry.has_entity_name,
-                "modified_by_integration": ["capabilities"]
-                if entity_config.capabilities is not UNDEFINED
-                else None,
                 "original_device_class": entity_entry.original_device_class,
                 # LIMITATION: `unit_of_measurement` can be overridden by users;
                 # we should replace it with the original value in the future.
                 "unit_of_measurement": entity_entry.unit_of_measurement,
             }

-            if (
-                ((device_id_ := entity_entry.device_id) is not None)
-                and ((new_device_id := device_id_mapping.get(device_id_)) is not None)
-                and (new_device_id[0] == integration_domain)
-            ):
-                device_info = devices_info[new_device_id[1]]
-                device_info["entities"].append(entity_info)
-            else:
-                entities_info.append(entity_info)
-
-    integrations = {
-        domain: integration
-        for domain, integration in (
-            await async_get_integrations(hass, integrations_info.keys())
-        ).items()
-        if isinstance(integration, Integration)
-    }
-
-    for domain, integration_info in integrations_info.items():
-        if integration := integrations.get(domain):
-            integration_info["is_custom_integration"] = not integration.is_built_in
-            # Include version for custom integrations
-            if not integration.is_built_in and integration.version:
-                integration_info["custom_integration_version"] = str(
-                    integration.version
-                )
+            if (device_id_ := entity_entry.device_id) is not None:
+                if device_id_ in removed_devices:
+                    # The device was removed, so we remove the entity too
+                    continue
+
+                if (
+                    new_device_id := device_id_mapping.get(device_id_)
+                ) is not None and (new_device_id[0] == integration_domain):
+                    device_info = devices_info[new_device_id[1]]
+                    device_info["entities"].append(entity_info)
+                    continue
+
+            entities_info.append(entity_info)

     return {
         "version": "home-assistant:1",
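The analytics changes above hinge on two bookkeeping structures: `removed_devices` (service devices and devices an integration asked to drop) and `device_id_mapping` (where each kept device landed in `devices_info`), which together decide whether an entity is attached to its device, dropped, or reported standalone. A simplified sketch of that assignment logic with made-up data shapes:

```python
# Rough sketch of the entity-to-device assignment performed in the payload
# code above.  The dicts below are simplified stand-ins for registry entries.
devices = {
    "dev-1": {"integration": "hue", "removed": False},
    "dev-2": {"integration": "hue", "removed": True},  # e.g. a service device
}
entities = [
    {"entity_id": "light.kitchen", "device_id": "dev-1", "integration": "hue"},
    {"entity_id": "sensor.bridge", "device_id": "dev-2", "integration": "hue"},
    {"entity_id": "sensor.no_device", "device_id": None, "integration": "hue"},
]

devices_info: list[dict] = []
entities_info: list[dict] = []
device_id_mapping: dict[str, tuple[str, int]] = {}
removed_devices: set[str] = set()

for device_id, device in devices.items():
    if device["removed"]:
        removed_devices.add(device_id)
        continue
    # Remember which slot in devices_info this device occupies
    device_id_mapping[device_id] = (device["integration"], len(devices_info))
    devices_info.append({"entities": []})

for entity in entities:
    info = {"entity_id": entity["entity_id"]}
    if (device_id := entity["device_id"]) is not None:
        if device_id in removed_devices:
            continue  # the device was dropped, so drop its entities too
        mapping = device_id_mapping.get(device_id)
        if mapping is not None and mapping[0] == entity["integration"]:
            devices_info[mapping[1]]["entities"].append(info)
            continue
    entities_info.append(info)

print(devices_info)   # [{'entities': [{'entity_id': 'light.kitchen'}]}]
print(entities_info)  # [{'entity_id': 'sensor.no_device'}]
```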
@@ -41,6 +41,11 @@ APPS_NEW_ID = "add_new"
 CONF_APP_DELETE = "app_delete"
 CONF_APP_ID = "app_id"

+_EXAMPLE_APP_ID = "com.plexapp.android"
+_EXAMPLE_APP_PLAY_STORE_URL = (
+    f"https://play.google.com/store/apps/details?id={_EXAMPLE_APP_ID}"
+)
+
 STEP_PAIR_DATA_SCHEMA = vol.Schema(
     {
         vol.Required("pin"): str,
@@ -355,5 +360,7 @@ class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithReload):
             data_schema=data_schema,
             description_placeholders={
                 "app_id": f"`{app_id}`" if app_id != APPS_NEW_ID else "",
+                "example_app_id": _EXAMPLE_APP_ID,
+                "example_app_play_store_url": _EXAMPLE_APP_PLAY_STORE_URL,
             },
         )
@@ -75,7 +75,7 @@
         },
         "data_description": {
           "app_name": "Name of the application as you would like it to be displayed in Home Assistant.",
-          "app_id": "E.g. com.plexapp.android for https://play.google.com/store/apps/details?id=com.plexapp.android",
+          "app_id": "E.g. {example_app_id} for {example_app_play_store_url}",
           "app_icon": "Image URL. From the Play Store app page, right click on the icon and select 'Copy image address' and then paste it here. Alternatively, download the image, upload it under /config/www/ and use the URL /local/filename",
           "app_delete": "Check this box to delete the application from the list."
         }
@@ -4,12 +4,15 @@ from __future__ import annotations

 from collections.abc import Mapping
 from functools import partial
+import json
 import logging
 from typing import Any, cast

 import anthropic
 import voluptuous as vol
+from voluptuous_openapi import convert

+from homeassistant.components.zone import ENTITY_ID_HOME
 from homeassistant.config_entries import (
     ConfigEntry,
     ConfigEntryState,
@@ -18,7 +21,13 @@ from homeassistant.config_entries import (
     ConfigSubentryFlow,
     SubentryFlowResult,
 )
-from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API, CONF_NAME
+from homeassistant.const import (
+    ATTR_LATITUDE,
+    ATTR_LONGITUDE,
+    CONF_API_KEY,
+    CONF_LLM_HASS_API,
+    CONF_NAME,
+)
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.helpers import llm
 from homeassistant.helpers.selector import (
@@ -37,12 +46,23 @@ from .const import (
     CONF_RECOMMENDED,
     CONF_TEMPERATURE,
     CONF_THINKING_BUDGET,
+    CONF_WEB_SEARCH,
+    CONF_WEB_SEARCH_CITY,
+    CONF_WEB_SEARCH_COUNTRY,
+    CONF_WEB_SEARCH_MAX_USES,
+    CONF_WEB_SEARCH_REGION,
+    CONF_WEB_SEARCH_TIMEZONE,
+    CONF_WEB_SEARCH_USER_LOCATION,
     DEFAULT_CONVERSATION_NAME,
     DOMAIN,
     RECOMMENDED_CHAT_MODEL,
     RECOMMENDED_MAX_TOKENS,
     RECOMMENDED_TEMPERATURE,
     RECOMMENDED_THINKING_BUDGET,
+    RECOMMENDED_WEB_SEARCH,
+    RECOMMENDED_WEB_SEARCH_MAX_USES,
+    RECOMMENDED_WEB_SEARCH_USER_LOCATION,
+    WEB_SEARCH_UNSUPPORTED_MODELS,
 )

 _LOGGER = logging.getLogger(__name__)
@@ -168,6 +188,14 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
                 CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
             ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
                 errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"

+            if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH):
+                model = user_input.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
+                if model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
+                    errors[CONF_WEB_SEARCH] = "web_search_unsupported_model"
+                elif user_input.get(
+                    CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
+                ):
+                    user_input.update(await self._get_location_data())
+
             if not errors:
                 if self._is_new:
@@ -215,6 +243,68 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
             errors=errors or None,
         )

+    async def _get_location_data(self) -> dict[str, str]:
+        """Get approximate location data of the user."""
+        location_data: dict[str, str] = {}
+        zone_home = self.hass.states.get(ENTITY_ID_HOME)
+        if zone_home is not None:
+            client = await self.hass.async_add_executor_job(
+                partial(
+                    anthropic.AsyncAnthropic,
+                    api_key=self._get_entry().data[CONF_API_KEY],
+                )
+            )
+            location_schema = vol.Schema(
+                {
+                    vol.Optional(
+                        CONF_WEB_SEARCH_CITY,
+                        description="Free text input for the city, e.g. `San Francisco`",
+                    ): str,
+                    vol.Optional(
+                        CONF_WEB_SEARCH_REGION,
+                        description="Free text input for the region, e.g. `California`",
+                    ): str,
+                }
+            )
+            response = await client.messages.create(
+                model=RECOMMENDED_CHAT_MODEL,
+                messages=[
+                    {
+                        "role": "user",
+                        "content": "Where are the following coordinates located: "
+                        f"({zone_home.attributes[ATTR_LATITUDE]},"
+                        f" {zone_home.attributes[ATTR_LONGITUDE]})? Please respond "
+                        "only with a JSON object using the following schema:\n"
+                        f"{convert(location_schema)}",
+                    },
+                    {
+                        "role": "assistant",
+                        "content": "{",  # hints the model to skip any preamble
+                    },
+                ],
+                max_tokens=RECOMMENDED_MAX_TOKENS,
+            )
+            _LOGGER.debug("Model response: %s", response.content)
+            location_data = location_schema(
+                json.loads(
+                    "{"
+                    + "".join(
+                        block.text
+                        for block in response.content
+                        if isinstance(block, anthropic.types.TextBlock)
+                    )
+                )
+                or {}
+            )
+
+        if self.hass.config.country:
+            location_data[CONF_WEB_SEARCH_COUNTRY] = self.hass.config.country
+        location_data[CONF_WEB_SEARCH_TIMEZONE] = self.hass.config.time_zone
+
+        _LOGGER.debug("Location data: %s", location_data)
+
+        return location_data
+
     async_step_user = async_step_set_options
     async_step_reconfigure = async_step_set_options

@@ -273,6 +363,18 @@ def anthropic_config_option_schema(
                 CONF_THINKING_BUDGET,
                 default=RECOMMENDED_THINKING_BUDGET,
             ): int,
+            vol.Optional(
+                CONF_WEB_SEARCH,
+                default=RECOMMENDED_WEB_SEARCH,
+            ): bool,
+            vol.Optional(
+                CONF_WEB_SEARCH_MAX_USES,
+                default=RECOMMENDED_WEB_SEARCH_MAX_USES,
+            ): int,
+            vol.Optional(
+                CONF_WEB_SEARCH_USER_LOCATION,
+                default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
+            ): bool,
         }
     )
     return schema
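`_get_location_data` above ends the prompt with an assistant turn whose content is `"{"`, so Claude continues the JSON object instead of writing a preamble; the reply therefore has to be re-prefixed with `{` before parsing. A tiny sketch of that parsing step with an invented reply string (no API call involved):

```python
# Sketch of the assistant-prefill trick used in _get_location_data above:
# the request ends with an assistant turn containing "{", so the model's
# reply is the rest of a JSON object and must be re-prefixed before
# json.loads.  The reply text below is made up for illustration.
import json

prefill = "{"
model_reply_text = '"city": "San Francisco", "region": "California"}'

location = json.loads(prefill + model_reply_text) or {}
print(location)  # {'city': 'San Francisco', 'region': 'California'}
```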
@@ -18,10 +18,26 @@ RECOMMENDED_TEMPERATURE = 1.0
 CONF_THINKING_BUDGET = "thinking_budget"
 RECOMMENDED_THINKING_BUDGET = 0
 MIN_THINKING_BUDGET = 1024
+CONF_WEB_SEARCH = "web_search"
+RECOMMENDED_WEB_SEARCH = False
+CONF_WEB_SEARCH_USER_LOCATION = "user_location"
+RECOMMENDED_WEB_SEARCH_USER_LOCATION = False
+CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
+RECOMMENDED_WEB_SEARCH_MAX_USES = 5
+CONF_WEB_SEARCH_CITY = "city"
+CONF_WEB_SEARCH_REGION = "region"
+CONF_WEB_SEARCH_COUNTRY = "country"
+CONF_WEB_SEARCH_TIMEZONE = "timezone"

-THINKING_MODELS = [
-    "claude-3-7-sonnet",
-    "claude-sonnet-4-0",
-    "claude-opus-4-0",
-    "claude-opus-4-1",
+NON_THINKING_MODELS = [
+    "claude-3-5",  # Both sonnet and haiku
+    "claude-3-opus",
+    "claude-3-haiku",
+]
+
+WEB_SEARCH_UNSUPPORTED_MODELS = [
+    "claude-3-haiku",
+    "claude-3-opus",
+    "claude-3-5-sonnet-20240620",
+    "claude-3-5-sonnet-20241022",
 ]
@@ -1,12 +1,17 @@
 """Base entity for Anthropic."""

 from collections.abc import AsyncGenerator, Callable, Iterable
+from dataclasses import dataclass, field
 import json
 from typing import Any

 import anthropic
 from anthropic import AsyncStream
 from anthropic.types import (
+    CitationsDelta,
+    CitationsWebSearchResultLocation,
+    CitationWebSearchResultLocationParam,
+    ContentBlockParam,
     InputJSONDelta,
     MessageDeltaUsage,
     MessageParam,
@@ -16,11 +21,16 @@ from anthropic.types import (
     RawContentBlockStopEvent,
     RawMessageDeltaEvent,
     RawMessageStartEvent,
+    RawMessageStopEvent,
     RedactedThinkingBlock,
     RedactedThinkingBlockParam,
+    ServerToolUseBlock,
+    ServerToolUseBlockParam,
     SignatureDelta,
     TextBlock,
     TextBlockParam,
+    TextCitation,
+    TextCitationParam,
     TextDelta,
     ThinkingBlock,
     ThinkingBlockParam,
@@ -29,9 +39,15 @@ from anthropic.types import (
     ThinkingDelta,
     ToolParam,
     ToolResultBlockParam,
+    ToolUnionParam,
     ToolUseBlock,
     ToolUseBlockParam,
     Usage,
+    WebSearchTool20250305Param,
+    WebSearchToolRequestErrorParam,
+    WebSearchToolResultBlock,
+    WebSearchToolResultBlockParam,
+    WebSearchToolResultError,
 )
 from anthropic.types.message_create_params import MessageCreateParamsStreaming
 from voluptuous_openapi import convert
@@ -48,14 +64,21 @@ from .const import (
     CONF_MAX_TOKENS,
     CONF_TEMPERATURE,
     CONF_THINKING_BUDGET,
+    CONF_WEB_SEARCH,
+    CONF_WEB_SEARCH_CITY,
+    CONF_WEB_SEARCH_COUNTRY,
+    CONF_WEB_SEARCH_MAX_USES,
+    CONF_WEB_SEARCH_REGION,
+    CONF_WEB_SEARCH_TIMEZONE,
+    CONF_WEB_SEARCH_USER_LOCATION,
     DOMAIN,
     LOGGER,
     MIN_THINKING_BUDGET,
+    NON_THINKING_MODELS,
     RECOMMENDED_CHAT_MODEL,
     RECOMMENDED_MAX_TOKENS,
     RECOMMENDED_TEMPERATURE,
     RECOMMENDED_THINKING_BUDGET,
-    THINKING_MODELS,
 )

 # Max number of back and forth with the LLM to generate a response
@@ -73,6 +96,69 @@ def _format_tool(
     )


+@dataclass(slots=True)
+class CitationDetails:
+    """Citation details for a content part."""
+
+    index: int = 0
+    """Start position of the text."""
+
+    length: int = 0
+    """Length of the relevant data."""
+
+    citations: list[TextCitationParam] = field(default_factory=list)
+    """Citations for the content part."""
+
+
+@dataclass(slots=True)
+class ContentDetails:
+    """Native data for AssistantContent."""
+
+    citation_details: list[CitationDetails] = field(default_factory=list)
+
+    def has_content(self) -> bool:
+        """Check if there is any content."""
+        return any(detail.length > 0 for detail in self.citation_details)
+
+    def has_citations(self) -> bool:
+        """Check if there are any citations."""
+        return any(detail.citations for detail in self.citation_details)
+
+    def add_citation_detail(self) -> None:
+        """Add a new citation detail."""
+        if not self.citation_details or self.citation_details[-1].length > 0:
+            self.citation_details.append(
+                CitationDetails(
+                    index=self.citation_details[-1].index
+                    + self.citation_details[-1].length
+                    if self.citation_details
+                    else 0
+                )
+            )
+
+    def add_citation(self, citation: TextCitation) -> None:
+        """Add a citation to the current detail."""
+        if not self.citation_details:
+            self.citation_details.append(CitationDetails())
+        citation_param: TextCitationParam | None = None
+        if isinstance(citation, CitationsWebSearchResultLocation):
+            citation_param = CitationWebSearchResultLocationParam(
+                type="web_search_result_location",
+                title=citation.title,
+                url=citation.url,
+                cited_text=citation.cited_text,
+                encrypted_index=citation.encrypted_index,
+            )
+        if citation_param:
+            self.citation_details[-1].citations.append(citation_param)
+
+    def delete_empty(self) -> None:
+        """Delete empty citation details."""
+        self.citation_details = [
+            detail for detail in self.citation_details if detail.citations
+        ]
+
+
 def _convert_content(
     chat_content: Iterable[conversation.Content],
 ) -> list[MessageParam]:
@@ -81,15 +167,31 @@ def _convert_content(

     for content in chat_content:
         if isinstance(content, conversation.ToolResultContent):
-            tool_result_block = ToolResultBlockParam(
-                type="tool_result",
-                tool_use_id=content.tool_call_id,
-                content=json.dumps(content.tool_result),
-            )
-            if not messages or messages[-1]["role"] != "user":
+            if content.tool_name == "web_search":
+                tool_result_block: ContentBlockParam = WebSearchToolResultBlockParam(
+                    type="web_search_tool_result",
+                    tool_use_id=content.tool_call_id,
+                    content=content.tool_result["content"]
+                    if "content" in content.tool_result
+                    else WebSearchToolRequestErrorParam(
+                        type="web_search_tool_result_error",
+                        error_code=content.tool_result.get("error_code", "unavailable"),  # type: ignore[typeddict-item]
+                    ),
+                )
+                external_tool = True
+            else:
+                tool_result_block = ToolResultBlockParam(
+                    type="tool_result",
+                    tool_use_id=content.tool_call_id,
+                    content=json.dumps(content.tool_result),
+                )
+                external_tool = False
+            if not messages or messages[-1]["role"] != (
+                "assistant" if external_tool else "user"
+            ):
                 messages.append(
                     MessageParam(
-                        role="user",
+                        role="assistant" if external_tool else "user",
                         content=[tool_result_block],
                     )
                 )
@@ -151,13 +253,56 @@ def _convert_content(
                     redacted_thinking_block
                 )
             if content.content:
-                messages[-1]["content"].append(  # type: ignore[union-attr]
-                    TextBlockParam(type="text", text=content.content)
-                )
+                current_index = 0
+                for detail in (
+                    content.native.citation_details
+                    if isinstance(content.native, ContentDetails)
+                    else [CitationDetails(length=len(content.content))]
+                ):
+                    if detail.index > current_index:
+                        # Add text block for any text without citations
+                        messages[-1]["content"].append(  # type: ignore[union-attr]
+                            TextBlockParam(
+                                type="text",
+                                text=content.content[current_index : detail.index],
+                            )
+                        )
+                    messages[-1]["content"].append(  # type: ignore[union-attr]
+                        TextBlockParam(
+                            type="text",
+                            text=content.content[
+                                detail.index : detail.index + detail.length
+                            ],
+                            citations=detail.citations,
+                        )
+                        if detail.citations
+                        else TextBlockParam(
+                            type="text",
+                            text=content.content[
+                                detail.index : detail.index + detail.length
+                            ],
+                        )
+                    )
+                    current_index = detail.index + detail.length
+                if current_index < len(content.content):
+                    # Add text block for any remaining text without citations
+                    messages[-1]["content"].append(  # type: ignore[union-attr]
+                        TextBlockParam(
+                            type="text",
+                            text=content.content[current_index:],
+                        )
+                    )
             if content.tool_calls:
                 messages[-1]["content"].extend(  # type: ignore[union-attr]
                     [
-                        ToolUseBlockParam(
+                        ServerToolUseBlockParam(
+                            type="server_tool_use",
+                            id=tool_call.id,
+                            name="web_search",
+                            input=tool_call.tool_args,
+                        )
+                        if tool_call.external and tool_call.tool_name == "web_search"
+                        else ToolUseBlockParam(
                             type="tool_use",
                             id=tool_call.id,
                             name=tool_call.tool_name,
@@ -173,10 +318,12 @@ def _convert_content(
     return messages


-async def _transform_stream(
+async def _transform_stream(  # noqa: C901 - This is complex, but better to have it in one place
     chat_log: conversation.ChatLog,
     stream: AsyncStream[MessageStreamEvent],
-) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
+) -> AsyncGenerator[
+    conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
+]:
     """Transform the response stream into HA format.

     A typical stream of responses might look something like the following:
@@ -209,11 +356,13 @@ async def _transform_stream(
     if stream is None:
         raise TypeError("Expected a stream of messages")

-    current_tool_block: ToolUseBlockParam | None = None
+    current_tool_block: ToolUseBlockParam | ServerToolUseBlockParam | None = None
     current_tool_args: str
+    content_details = ContentDetails()
+    content_details.add_citation_detail()
     input_usage: Usage | None = None
-    has_content = False
     has_native = False
+    first_block: bool

     async for response in stream:
         LOGGER.debug("Received response: %s", response)
@@ -222,6 +371,7 @@ async def _transform_stream(
             if response.message.role != "assistant":
                 raise ValueError("Unexpected message role")
             input_usage = response.message.usage
+            first_block = True
         elif isinstance(response, RawContentBlockStartEvent):
             if isinstance(response.content_block, ToolUseBlock):
                 current_tool_block = ToolUseBlockParam(
@@ -232,17 +382,37 @@ async def _transform_stream(
                 )
                 current_tool_args = ""
             elif isinstance(response.content_block, TextBlock):
-                if has_content:
+                if (  # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
+                    first_block
+                    or (
+                        not content_details.has_citations()
+                        and response.content_block.citations is None
+                        and content_details.has_content()
+                    )
+                ):
+                    if content_details.has_citations():
+                        content_details.delete_empty()
+                        yield {"native": content_details}
+                    content_details = ContentDetails()
                     yield {"role": "assistant"}
                     has_native = False
-                    has_content = True
+                    first_block = False
+                content_details.add_citation_detail()
                 if response.content_block.text:
+                    content_details.citation_details[-1].length += len(
+                        response.content_block.text
+                    )
                     yield {"content": response.content_block.text}
             elif isinstance(response.content_block, ThinkingBlock):
-                if has_native:
+                if first_block or has_native:
+                    if content_details.has_citations():
+                        content_details.delete_empty()
+                        yield {"native": content_details}
+                    content_details = ContentDetails()
+                    content_details.add_citation_detail()
                     yield {"role": "assistant"}
                     has_native = False
-                    has_content = False
+                    first_block = False
             elif isinstance(response.content_block, RedactedThinkingBlock):
                 LOGGER.debug(
                     "Some of Claude’s internal reasoning has been automatically "
@@ -250,15 +420,60 @@ async def _transform_stream(
                     "responses"
                 )
                 if has_native:
+                    if content_details.has_citations():
+                        content_details.delete_empty()
+                        yield {"native": content_details}
+                    content_details = ContentDetails()
+                    content_details.add_citation_detail()
                     yield {"role": "assistant"}
                     has_native = False
-                    has_content = False
+                    first_block = False
                 yield {"native": response.content_block}
                 has_native = True
+            elif isinstance(response.content_block, ServerToolUseBlock):
+                current_tool_block = ServerToolUseBlockParam(
+                    type="server_tool_use",
+                    id=response.content_block.id,
+                    name=response.content_block.name,
+                    input="",
+                )
+                current_tool_args = ""
+            elif isinstance(response.content_block, WebSearchToolResultBlock):
+                if content_details.has_citations():
+                    content_details.delete_empty()
+                    yield {"native": content_details}
+                content_details = ContentDetails()
+                content_details.add_citation_detail()
+                yield {
+                    "role": "tool_result",
+                    "tool_call_id": response.content_block.tool_use_id,
+                    "tool_name": "web_search",
+                    "tool_result": {
+                        "type": "web_search_tool_result_error",
+                        "error_code": response.content_block.content.error_code,
+                    }
+                    if isinstance(
+                        response.content_block.content, WebSearchToolResultError
+                    )
+                    else {
+                        "content": [
+                            {
+                                "type": "web_search_result",
+                                "encrypted_content": block.encrypted_content,
+                                "page_age": block.page_age,
+                                "title": block.title,
+                                "url": block.url,
+                            }
+                            for block in response.content_block.content
+                        ]
+                    },
+                }
+                first_block = True
         elif isinstance(response, RawContentBlockDeltaEvent):
             if isinstance(response.delta, InputJSONDelta):
                 current_tool_args += response.delta.partial_json
             elif isinstance(response.delta, TextDelta):
+                content_details.citation_details[-1].length += len(response.delta.text)
                 yield {"content": response.delta.text}
             elif isinstance(response.delta, ThinkingDelta):
                 yield {"thinking_content": response.delta.thinking}
@@ -271,6 +486,8 @@ async def _transform_stream(
                     )
                 }
                 has_native = True
+            elif isinstance(response.delta, CitationsDelta):
+                content_details.add_citation(response.delta.citation)
         elif isinstance(response, RawContentBlockStopEvent):
             if current_tool_block is not None:
                 tool_args = json.loads(current_tool_args) if current_tool_args else {}
@@ -281,6 +498,7 @@ async def _transform_stream(
                         id=current_tool_block["id"],
                         tool_name=current_tool_block["name"],
                         tool_args=tool_args,
+                        external=current_tool_block["type"] == "server_tool_use",
                     )
                 ]
             }
@@ -290,6 +508,12 @@ async def _transform_stream(
             chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||||
if response.delta.stop_reason == "refusal":
|
if response.delta.stop_reason == "refusal":
|
||||||
raise HomeAssistantError("Potential policy violation detected")
|
raise HomeAssistantError("Potential policy violation detected")
|
||||||
|
elif isinstance(response, RawMessageStopEvent):
|
||||||
|
if content_details.has_citations():
|
||||||
|
content_details.delete_empty()
|
||||||
|
yield {"native": content_details}
|
||||||
|
content_details = ContentDetails()
|
||||||
|
content_details.add_citation_detail()
|
||||||
|
|
||||||
|
|
||||||
def _create_token_stats(
|
def _create_token_stats(
|
||||||
@@ -337,21 +561,11 @@ class AnthropicBaseLLMEntity(Entity):
|
|||||||
"""Generate an answer for the chat log."""
|
"""Generate an answer for the chat log."""
|
||||||
options = self.subentry.data
|
options = self.subentry.data
|
||||||
|
|
||||||
tools: list[ToolParam] | None = None
|
|
||||||
if chat_log.llm_api:
|
|
||||||
tools = [
|
|
||||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
|
||||||
for tool in chat_log.llm_api.tools
|
|
||||||
]
|
|
||||||
|
|
||||||
system = chat_log.content[0]
|
system = chat_log.content[0]
|
||||||
if not isinstance(system, conversation.SystemContent):
|
if not isinstance(system, conversation.SystemContent):
|
||||||
raise TypeError("First message must be a system message")
|
raise TypeError("First message must be a system message")
|
||||||
messages = _convert_content(chat_log.content[1:])
|
messages = _convert_content(chat_log.content[1:])
|
||||||
|
|
||||||
client = self.entry.runtime_data
|
|
||||||
|
|
||||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
|
||||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||||
|
|
||||||
model_args = MessageCreateParamsStreaming(
|
model_args = MessageCreateParamsStreaming(
|
||||||
@@ -361,10 +575,10 @@ class AnthropicBaseLLMEntity(Entity):
|
|||||||
system=system.content,
|
system=system.content,
|
||||||
stream=True,
|
stream=True,
|
||||||
)
|
)
|
||||||
if tools:
|
|
||||||
model_args["tools"] = tools
|
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||||
if (
|
if (
|
||||||
model.startswith(tuple(THINKING_MODELS))
|
not model.startswith(tuple(NON_THINKING_MODELS))
|
||||||
and thinking_budget >= MIN_THINKING_BUDGET
|
and thinking_budget >= MIN_THINKING_BUDGET
|
||||||
):
|
):
|
||||||
model_args["thinking"] = ThinkingConfigEnabledParam(
|
model_args["thinking"] = ThinkingConfigEnabledParam(
|
||||||
@@ -376,6 +590,34 @@ class AnthropicBaseLLMEntity(Entity):
|
|||||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||||
)
|
)
|
||||||
|
|
||||||
|
tools: list[ToolUnionParam] = []
|
||||||
|
if chat_log.llm_api:
|
||||||
|
tools = [
|
||||||
|
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||||
|
for tool in chat_log.llm_api.tools
|
||||||
|
]
|
||||||
|
|
||||||
|
if options.get(CONF_WEB_SEARCH):
|
||||||
|
web_search = WebSearchTool20250305Param(
|
||||||
|
name="web_search",
|
||||||
|
type="web_search_20250305",
|
||||||
|
max_uses=options.get(CONF_WEB_SEARCH_MAX_USES),
|
||||||
|
)
|
||||||
|
if options.get(CONF_WEB_SEARCH_USER_LOCATION):
|
||||||
|
web_search["user_location"] = {
|
||||||
|
"type": "approximate",
|
||||||
|
"city": options.get(CONF_WEB_SEARCH_CITY, ""),
|
||||||
|
"region": options.get(CONF_WEB_SEARCH_REGION, ""),
|
||||||
|
"country": options.get(CONF_WEB_SEARCH_COUNTRY, ""),
|
||||||
|
"timezone": options.get(CONF_WEB_SEARCH_TIMEZONE, ""),
|
||||||
|
}
|
||||||
|
tools.append(web_search)
|
||||||
|
|
||||||
|
if tools:
|
||||||
|
model_args["tools"] = tools
|
||||||
|
|
||||||
|
client = self.entry.runtime_data
|
||||||
|
|
||||||
# To prevent infinite loops, we limit the number of iterations
|
# To prevent infinite loops, we limit the number of iterations
|
||||||
for _iteration in range(MAX_TOOL_ITERATIONS):
|
for _iteration in range(MAX_TOOL_ITERATIONS):
|
||||||
try:
|
try:
|
||||||
|
|||||||
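The hunks above extend the stream transformer with citation tracking, server tool use, and web search result blocks. As a point of reference only (not part of the change above), here is a minimal, stand-alone sketch of consuming an Anthropic message stream and dispatching on the same event types; the model name and prompt are placeholder values.

import asyncio

from anthropic import AsyncAnthropic
from anthropic.types import (
    RawContentBlockDeltaEvent,
    RawContentBlockStartEvent,
    RawMessageStopEvent,
    TextBlock,
    TextDelta,
)


async def main() -> None:
    # AsyncAnthropic() reads ANTHROPIC_API_KEY from the environment.
    client = AsyncAnthropic()
    stream = await client.messages.create(
        model="claude-sonnet-4-0",  # example model name
        max_tokens=256,
        messages=[{"role": "user", "content": "Say hello"}],
        stream=True,
    )
    async for event in stream:
        # Text can arrive either on the block-start event or as deltas.
        if isinstance(event, RawContentBlockStartEvent) and isinstance(
            event.content_block, TextBlock
        ):
            print(event.content_block.text, end="")
        elif isinstance(event, RawContentBlockDeltaEvent) and isinstance(
            event.delta, TextDelta
        ):
            print(event.delta.text, end="")
        elif isinstance(event, RawMessageStopEvent):
            print()


asyncio.run(main())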
@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/anthropic",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["anthropic==0.62.0"]
+  "requirements": ["anthropic==0.69.0"]
 }
@@ -35,11 +35,17 @@
             "temperature": "Temperature",
             "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
             "recommended": "Recommended model settings",
-            "thinking_budget_tokens": "Thinking budget"
+            "thinking_budget": "Thinking budget",
+            "web_search": "Enable web search",
+            "web_search_max_uses": "Maximum web searches",
+            "user_location": "Include home location"
           },
           "data_description": {
             "prompt": "Instruct how the LLM should respond. This can be a template.",
-            "thinking_budget_tokens": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking."
+            "thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.",
+            "web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
+            "web_search_max_uses": "Limit the number of searches performed per response",
+            "user_location": "Localize search results based on home location"
           }
         }
       },
@@ -48,7 +54,8 @@
       "entry_not_loaded": "Cannot add things while the configuration is disabled."
     },
     "error": {
-      "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget."
+      "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget.",
+      "web_search_unsupported_model": "Web search is not supported by the selected model. Please choose a compatible model or disable web search."
     }
   }
 }
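For context, the new options surface Anthropic's server-side web search tool. Below is a hedged, stand-alone sketch of enabling that tool on a Messages API request (not taken from the integration); the location, max_uses, and model values are made-up examples.

from anthropic import Anthropic

client = Anthropic()  # reads ANTHROPIC_API_KEY from the environment

# Matches the WebSearchTool20250305Param shape used in the diff above.
web_search_tool = {
    "type": "web_search_20250305",
    "name": "web_search",
    "max_uses": 5,  # example cap on searches per response
    "user_location": {
        "type": "approximate",
        "city": "Amsterdam",
        "region": "North Holland",
        "country": "NL",
        "timezone": "Europe/Amsterdam",
    },
}

response = client.messages.create(
    model="claude-sonnet-4-0",  # example model name
    max_tokens=1024,
    tools=[web_search_tool],
    messages=[{"role": "user", "content": "What is the weather in Amsterdam today?"}],
)
print(response.content)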
@@ -5,14 +5,9 @@ from __future__ import annotations
 import asyncio
 import logging
 from random import randrange
+import sys
 from typing import Any, cast

-from pyatv import connect, exceptions, scan
-from pyatv.conf import AppleTV
-from pyatv.const import DeviceModel, Protocol
-from pyatv.convert import model_str
-from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener
-
 from homeassistant.components import zeroconf
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
@@ -29,7 +24,11 @@ from homeassistant.const import (
     Platform,
 )
 from homeassistant.core import Event, HomeAssistant, callback
-from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
+from homeassistant.exceptions import (
+    ConfigEntryAuthFailed,
+    ConfigEntryNotReady,
+    HomeAssistantError,
+)
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.dispatcher import async_dispatcher_send
@@ -43,6 +42,18 @@ from .const import (
     SIGNAL_DISCONNECTED,
 )

+if sys.version_info < (3, 14):
+    from pyatv import connect, exceptions, scan
+    from pyatv.conf import AppleTV
+    from pyatv.const import DeviceModel, Protocol
+    from pyatv.convert import model_str
+    from pyatv.interface import AppleTV as AppleTVInterface, DeviceListener
+else:
+
+    class DeviceListener:
+        """Dummy class."""
+
+
 _LOGGER = logging.getLogger(__name__)

 DEFAULT_NAME_TV = "Apple TV"
@@ -53,31 +64,41 @@ BACKOFF_TIME_UPPER_LIMIT = 300  # Five minutes

 PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE]

-AUTH_EXCEPTIONS = (
-    exceptions.AuthenticationError,
-    exceptions.InvalidCredentialsError,
-    exceptions.NoCredentialsError,
-)
-CONNECTION_TIMEOUT_EXCEPTIONS = (
-    OSError,
-    asyncio.CancelledError,
-    TimeoutError,
-    exceptions.ConnectionLostError,
-    exceptions.ConnectionFailedError,
-)
-DEVICE_EXCEPTIONS = (
-    exceptions.ProtocolError,
-    exceptions.NoServiceError,
-    exceptions.PairingError,
-    exceptions.BackOffError,
-    exceptions.DeviceIdMissingError,
-)
+if sys.version_info < (3, 14):
+    AUTH_EXCEPTIONS = (
+        exceptions.AuthenticationError,
+        exceptions.InvalidCredentialsError,
+        exceptions.NoCredentialsError,
+    )
+    CONNECTION_TIMEOUT_EXCEPTIONS = (
+        OSError,
+        asyncio.CancelledError,
+        TimeoutError,
+        exceptions.ConnectionLostError,
+        exceptions.ConnectionFailedError,
+    )
+    DEVICE_EXCEPTIONS = (
+        exceptions.ProtocolError,
+        exceptions.NoServiceError,
+        exceptions.PairingError,
+        exceptions.BackOffError,
+        exceptions.DeviceIdMissingError,
+    )
+else:
+    AUTH_EXCEPTIONS = ()
+    CONNECTION_TIMEOUT_EXCEPTIONS = ()
+    DEVICE_EXCEPTIONS = ()


 type AppleTvConfigEntry = ConfigEntry[AppleTVManager]


 async def async_setup_entry(hass: HomeAssistant, entry: AppleTvConfigEntry) -> bool:
     """Set up a config entry for Apple TV."""
+    if sys.version_info >= (3, 14):
+        raise HomeAssistantError(
+            "Apple TV is not supported on Python 3.14. Please use Python 3.13."
+        )
     manager = AppleTVManager(hass, entry)

     if manager.is_on:
@@ -7,7 +7,7 @@
   "documentation": "https://www.home-assistant.io/integrations/apple_tv",
   "iot_class": "local_push",
   "loggers": ["pyatv", "srptools"],
-  "requirements": ["pyatv==0.16.1"],
+  "requirements": ["pyatv==0.16.1;python_version<'3.14'"],
   "zeroconf": [
     "_mediaremotetv._tcp.local.",
     "_companion-link._tcp.local.",
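The Apple TV change above gates the pyatv imports on the interpreter version and mirrors that in the manifest's requirement marker. A minimal sketch of the same pattern follows; the package name is a placeholder and the fallback class is only there so the module still imports where the dependency is unavailable.

import sys

if sys.version_info < (3, 14):
    # Real import on interpreters the dependency supports.
    from somepackage import DeviceListener  # placeholder package name
else:

    class DeviceListener:  # minimal stand-in so this module still imports
        """Dummy class."""


# The matching manifest entry uses an environment marker so the requirement is
# skipped entirely on unsupported interpreters:
#   "requirements": ["pyatv==0.16.1;python_version<'3.14'"]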
@@ -7,6 +7,8 @@ from typing import Any
 from pyaprilaire.const import Attribute

 from homeassistant.components.climate import (
+    ATTR_TARGET_TEMP_HIGH,
+    ATTR_TARGET_TEMP_LOW,
     FAN_AUTO,
     FAN_ON,
     PRESET_AWAY,
@@ -16,7 +18,12 @@ from homeassistant.components.climate import (
     HVACAction,
     HVACMode,
 )
-from homeassistant.const import PRECISION_HALVES, PRECISION_WHOLE, UnitOfTemperature
+from homeassistant.const import (
+    ATTR_TEMPERATURE,
+    PRECISION_HALVES,
+    PRECISION_WHOLE,
+    UnitOfTemperature,
+)
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

@@ -232,15 +239,15 @@ class AprilaireClimate(BaseAprilaireEntity, ClimateEntity):
         cool_setpoint = 0
         heat_setpoint = 0

-        if temperature := kwargs.get("temperature"):
+        if temperature := kwargs.get(ATTR_TEMPERATURE):
             if self.coordinator.data.get(Attribute.MODE) == 3:
                 cool_setpoint = temperature
             else:
                 heat_setpoint = temperature
         else:
-            if target_temp_low := kwargs.get("target_temp_low"):
+            if target_temp_low := kwargs.get(ATTR_TARGET_TEMP_LOW):
                 heat_setpoint = target_temp_low
-            if target_temp_high := kwargs.get("target_temp_high"):
+            if target_temp_high := kwargs.get(ATTR_TARGET_TEMP_HIGH):
                 cool_setpoint = target_temp_high

         if cool_setpoint == 0 and heat_setpoint == 0:
@@ -41,6 +41,8 @@ from .pipeline import (
     async_setup_pipeline_store,
     async_update_pipeline,
 )
+from .select import AssistPipelineSelect, VadSensitivitySelect
+from .vad import VadSensitivity
 from .websocket_api import async_register_websocket_api

 __all__ = (
@@ -51,16 +53,18 @@ __all__ = (
     "SAMPLE_CHANNELS",
     "SAMPLE_RATE",
     "SAMPLE_WIDTH",
+    "AssistPipelineSelect",
     "AudioSettings",
     "Pipeline",
     "PipelineEvent",
     "PipelineEventType",
     "PipelineNotFound",
+    "VadSensitivity",
+    "VadSensitivitySelect",
     "WakeWordSettings",
     "async_create_default_pipeline",
     "async_get_pipelines",
     "async_pipeline_from_audio_stream",
-    "async_setup",
     "async_update_pipeline",
 )

@@ -19,7 +19,14 @@ import wave
 import hass_nabucasa
 import voluptuous as vol

-from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
+from homeassistant.components import (
+    conversation,
+    media_player,
+    stt,
+    tts,
+    wake_word,
+    websocket_api,
+)
 from homeassistant.const import ATTR_SUPPORTED_FEATURES, MATCH_ALL
 from homeassistant.core import Context, HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
@@ -130,7 +137,10 @@ SAVE_DELAY = 10
 @callback
 def _async_local_fallback_intent_filter(result: RecognizeResult) -> bool:
     """Filter out intents that are not local fallback."""
-    return result.intent.name in (intent.INTENT_GET_STATE)
+    return result.intent.name in (
+        intent.INTENT_GET_STATE,
+        media_player.INTENT_MEDIA_SEARCH_AND_PLAY,
+    )


 @callback
@@ -1308,7 +1318,9 @@ class PipelineRun:
             # instead of a full response.
             all_targets_in_satellite_area = (
                 self._get_all_targets_in_satellite_area(
-                    conversation_result.response, self._device_id
+                    conversation_result.response,
+                    self._satellite_id,
+                    self._device_id,
                 )
             )

@@ -1337,39 +1349,62 @@ class PipelineRun:
         return (speech, all_targets_in_satellite_area)

     def _get_all_targets_in_satellite_area(
-        self, intent_response: intent.IntentResponse, device_id: str | None
+        self,
+        intent_response: intent.IntentResponse,
+        satellite_id: str | None,
+        device_id: str | None,
     ) -> bool:
         """Return true if all targeted entities were in the same area as the device."""
         if (
-            (intent_response.response_type != intent.IntentResponseType.ACTION_DONE)
-            or (not intent_response.matched_states)
-            or (not device_id)
-        ):
-            return False
-
-        device_registry = dr.async_get(self.hass)
-
-        if (not (device := device_registry.async_get(device_id))) or (
-            not device.area_id
+            intent_response.response_type != intent.IntentResponseType.ACTION_DONE
+            or not intent_response.matched_states
         ):
             return False

         entity_registry = er.async_get(self.hass)
-        for state in intent_response.matched_states:
-            entity = entity_registry.async_get(state.entity_id)
-            if not entity:
+        device_registry = dr.async_get(self.hass)
+        area_id: str | None = None
+
+        if (
+            satellite_id is not None
+            and (target_entity_entry := entity_registry.async_get(satellite_id))
+            is not None
+        ):
+            area_id = target_entity_entry.area_id
+            device_id = target_entity_entry.device_id
+
+        if area_id is None:
+            if device_id is None:
                 return False

-            if (entity_area_id := entity.area_id) is None:
-                if (entity.device_id is None) or (
-                    (entity_device := device_registry.async_get(entity.device_id))
-                    is None
-                ):
+            device_entry = device_registry.async_get(device_id)
+            if device_entry is None:
+                return False
+
+            area_id = device_entry.area_id
+            if area_id is None:
+                return False
+
+        for state in intent_response.matched_states:
+            target_entity_entry = entity_registry.async_get(state.entity_id)
+            if target_entity_entry is None:
+                return False
+
+            target_area_id = target_entity_entry.area_id
+            if target_area_id is None:
+                if target_entity_entry.device_id is None:
                     return False

-                entity_area_id = entity_device.area_id
+                target_device_entry = device_registry.async_get(
+                    target_entity_entry.device_id
+                )
+                if target_device_entry is None:
+                    return False
+
+                target_area_id = target_device_entry.area_id

-            if entity_area_id != device.area_id:
+            if target_area_id != area_id:
                 return False

         return True
@@ -3,17 +3,17 @@
 from __future__ import annotations

 from abc import ABC, abstractmethod
-from collections import namedtuple
 from collections.abc import Awaitable, Callable, Coroutine
 import functools
 import logging
-from typing import Any, cast
+from typing import Any, NamedTuple

 from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
 from aiohttp import ClientSession
 from asusrouter import AsusRouter, AsusRouterError
 from asusrouter.config import ARConfigKey
 from asusrouter.modules.client import AsusClient
+from asusrouter.modules.connection import ConnectionState
 from asusrouter.modules.data import AsusData
 from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors
 from asusrouter.tools.connection import get_cookie_jar
@@ -61,11 +61,27 @@ SENSORS_TYPE_RATES = "sensors_rates"
 SENSORS_TYPE_TEMPERATURES = "sensors_temperatures"
 SENSORS_TYPE_UPTIME = "sensors_uptime"

-WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"])  # noqa: PYI024
+
+class WrtDevice(NamedTuple):
+    """WrtDevice structure."""
+
+    ip: str | None
+    name: str | None
+    conneted_to: str | None
+

 _LOGGER = logging.getLogger(__name__)

-type _FuncType[_T] = Callable[[_T], Awaitable[list[Any] | tuple[Any] | dict[str, Any]]]
+type _FuncType[_T] = Callable[
+    [_T],
+    Awaitable[
+        list[str]
+        | tuple[float | None, float | None]
+        | list[float]
+        | dict[str, float | str | None]
+        | dict[str, float]
+    ],
+]
 type _ReturnFuncType[_T] = Callable[[_T], Coroutine[Any, Any, dict[str, Any]]]


@@ -80,7 +96,9 @@ def handle_errors_and_zip[_AsusWrtBridgeT: AsusWrtBridge](
     """Run library methods and zip results or manage exceptions."""

     @functools.wraps(func)
-    async def _wrapper(self: _AsusWrtBridgeT) -> dict[str, Any]:
+    async def _wrapper(
+        self: _AsusWrtBridgeT,
+    ) -> dict[str, float | str | None] | dict[str, float]:
         try:
             data = await func(self)
         except exceptions as exc:
@@ -107,7 +125,9 @@ class AsusWrtBridge(ABC):

     @staticmethod
     def get_bridge(
-        hass: HomeAssistant, conf: dict[str, Any], options: dict[str, Any] | None = None
+        hass: HomeAssistant,
+        conf: dict[str, str | int],
+        options: dict[str, str | bool | int] | None = None,
     ) -> AsusWrtBridge:
         """Get Bridge instance."""
         if conf[CONF_PROTOCOL] in (PROTOCOL_HTTPS, PROTOCOL_HTTP):
@@ -219,7 +239,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge):
     @property
     def is_connected(self) -> bool:
         """Get connected status."""
-        return cast(bool, self._api.is_connected)
+        return self._api.is_connected

     async def async_connect(self) -> None:
         """Connect to the device."""
@@ -235,8 +255,7 @@ class AsusWrtLegacyBridge(AsusWrtBridge):

     async def async_disconnect(self) -> None:
         """Disconnect to the device."""
-        if self._api is not None and self._protocol == PROTOCOL_TELNET:
-            self._api.connection.disconnect()
+        await self._api.async_disconnect()

     async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
         """Get list of connected devices."""
@@ -307,22 +326,22 @@ class AsusWrtLegacyBridge(AsusWrtBridge):
         return [SENSORS_TEMPERATURES_LEGACY[i] for i in range(3) if availability[i]]

     @handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_BYTES)
-    async def _get_bytes(self) -> Any:
+    async def _get_bytes(self) -> tuple[float | None, float | None]:
         """Fetch byte information from the router."""
         return await self._api.async_get_bytes_total()

     @handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_RATES)
-    async def _get_rates(self) -> Any:
+    async def _get_rates(self) -> tuple[float, float]:
         """Fetch rates information from the router."""
         return await self._api.async_get_current_transfer_rates()

     @handle_errors_and_zip((IndexError, OSError, ValueError), SENSORS_LOAD_AVG)
-    async def _get_load_avg(self) -> Any:
+    async def _get_load_avg(self) -> list[float]:
         """Fetch load average information from the router."""
         return await self._api.async_get_loadavg()

     @handle_errors_and_zip((OSError, ValueError), None)
-    async def _get_temperatures(self) -> Any:
+    async def _get_temperatures(self) -> dict[str, float]:
         """Fetch temperatures information from the router."""
         return await self._api.async_get_temperature()

@@ -437,6 +456,7 @@ class AsusWrtHttpBridge(AsusWrtBridge):
             if dev.connection is not None
             and dev.description is not None
             and dev.connection.ip_address is not None
+            and dev.state is ConnectionState.CONNECTED
         }

     async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]:
@@ -175,12 +175,12 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
         )

     async def _async_check_connection(
-        self, user_input: dict[str, Any]
+        self, user_input: dict[str, str | int]
     ) -> tuple[str, str | None]:
         """Attempt to connect the AsusWrt router."""

         api: AsusWrtBridge
-        host: str = user_input[CONF_HOST]
+        host = user_input[CONF_HOST]
         protocol = user_input[CONF_PROTOCOL]
         error: str | None = None

@@ -10,8 +10,6 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from . import AsusWrtConfigEntry
 from .router import AsusWrtDevInfo, AsusWrtRouter

-ATTR_LAST_TIME_REACHABLE = "last_time_reachable"
-
 DEFAULT_DEVICE_NAME = "Unknown device"


@@ -58,8 +56,6 @@ def add_entities(
 class AsusWrtDevice(ScannerEntity):
     """Representation of a AsusWrt device."""

-    _unrecorded_attributes = frozenset({ATTR_LAST_TIME_REACHABLE})
-
     _attr_should_poll = False

     def __init__(self, router: AsusWrtRouter, device: AsusWrtDevInfo) -> None:
@@ -97,11 +93,6 @@ class AsusWrtDevice(ScannerEntity):
     def async_on_demand_update(self) -> None:
         """Update state."""
         self._device = self._router.devices[self._device.mac]
-        self._attr_extra_state_attributes = {}
-        if self._device.last_activity:
-            self._attr_extra_state_attributes[ATTR_LAST_TIME_REACHABLE] = (
-                self._device.last_activity.isoformat(timespec="seconds")
-            )
         self.async_write_ha_state()

     async def async_added_to_hass(self) -> None:
@@ -2,9 +2,7 @@

 from __future__ import annotations

-from typing import Any, TypeVar
+from typing import Any

-T = TypeVar("T", dict[str, Any], list[Any], None)
-
 TRANSLATION_MAP = {
     "wan_rx": "sensor_rx_bytes",
@@ -36,7 +34,7 @@ def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
     return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}


-def translate_to_legacy(raw: T) -> T:
+def translate_to_legacy[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
     """Translate raw data to legacy format for dicts and lists."""

     if raw is None:
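The last hunk swaps a module-level constrained TypeVar for PEP 695 type-parameter syntax (Python 3.12+). For illustration only, the same conversion on a trivial stand-alone function with made-up names:

from typing import Any

# Before (pre-3.12 style):
#   T = TypeVar("T", dict[str, Any], list[Any], None)
#   def passthrough(raw: T) -> T: ...

# After (PEP 695): the constraint set moves into the function's type-parameter list.
def passthrough[T: (dict[str, Any], list[Any], None)](raw: T) -> T:
    """Return the value unchanged; the return type mirrors the argument type."""
    return raw


print(passthrough({"a": 1}), passthrough(None))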
Some files were not shown because too many files have changed in this diff.