Mirror of https://github.com/home-assistant/core.git
Synced 2025-09-23 20:09:35 +00:00

Compare commits
781 commits: debug_esph...cloud_enab
@@ -79,6 +79,7 @@ components: &components
- homeassistant/components/group/**
- homeassistant/components/hassio/**
- homeassistant/components/homeassistant/**
- homeassistant/components/homeassistant_hardware/**
- homeassistant/components/http/**
- homeassistant/components/image/**
- homeassistant/components/input_boolean/**
@@ -58,7 +58,13 @@
],
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff"
}
},
"json.schemas": [
{
"fileMatch": ["homeassistant/components/*/manifest.json"],
"url": "./script/json_schemas/manifest_schema.json"
}
]
}
}
}
.github/workflows/builder.yml (vendored, 2 changes)

@@ -531,7 +531,7 @@ jobs:

- name: Generate artifact attestation
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
with:
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
subject-digest: ${{ steps.push.outputs.digest }}
.github/workflows/ci.yaml (vendored, 34 changes)

@@ -40,9 +40,9 @@ env:
CACHE_VERSION: 11
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 9
HA_SHORT_VERSION: "2024.11"
HA_SHORT_VERSION: "2024.12"
DEFAULT_PYTHON: "3.12"
ALL_PYTHON_VERSIONS: "['3.12']"
ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
# 10.3 is the oldest supported version
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
# 10.6 is the current long-term-support

@@ -622,13 +622,13 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@v5.3.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
python-version: ${{ matrix.python-version }}
check-latest: true
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
- name: Restore full Python ${{ matrix.python-version }} virtual environment
id: cache-venv
uses: actions/cache/restore@v4.1.2
with:

@@ -819,11 +819,7 @@ jobs:
needs:
- info
- base
strategy:
fail-fast: false
matrix:
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
name: Split tests for full run Python ${{ matrix.python-version }}
name: Split tests for full run
steps:
- name: Install additional OS dependencies
run: |

@@ -836,11 +832,11 @@ jobs:
libgammu-dev
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2
- name: Set up Python ${{ matrix.python-version }}
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@v5.3.0
with:
python-version: ${{ matrix.python-version }}
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv

@@ -858,7 +854,7 @@ jobs:
- name: Upload pytest_buckets
uses: actions/upload-artifact@v4.4.3
with:
name: pytest_buckets-${{ matrix.python-version }}
name: pytest_buckets
path: pytest_buckets.txt
overwrite: true

@@ -923,7 +919,7 @@ jobs:
- name: Download pytest_buckets
uses: actions/download-artifact@v4.1.8
with:
name: pytest_buckets-${{ matrix.python-version }}
name: pytest_buckets
- name: Compile English translations
run: |
. venv/bin/activate

@@ -949,6 +945,7 @@ jobs:
--timeout=9 \
--durations=10 \
--numprocesses auto \
--snapshot-details \
--dist=loadfile \
${cov_params[@]} \
-o console_output_style=count \

@@ -1071,6 +1068,7 @@ jobs:
-qq \
--timeout=20 \
--numprocesses 1 \
--snapshot-details \
${cov_params[@]} \
-o console_output_style=count \
--durations=10 \

@@ -1102,7 +1100,7 @@ jobs:
./script/check_dirty

pytest-postgres:
runs-on: ubuntu-22.04
runs-on: ubuntu-24.04
services:
postgres:
image: ${{ matrix.postgresql-group }}

@@ -1142,7 +1140,9 @@ jobs:
sudo apt-get -y install \
bluez \
ffmpeg \
libturbojpeg \
libturbojpeg
sudo /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y
sudo apt-get -y install \
postgresql-server-dev-14
- name: Check out code from GitHub
uses: actions/checkout@v4.2.2

@@ -1197,6 +1197,7 @@ jobs:
-qq \
--timeout=9 \
--numprocesses 1 \
--snapshot-details \
${cov_params[@]} \
-o console_output_style=count \
--durations=0 \

@@ -1343,6 +1344,7 @@ jobs:
-qq \
--timeout=9 \
--numprocesses auto \
--snapshot-details \
${cov_params[@]} \
-o console_output_style=count \
--durations=0 \
.github/workflows/codeql.yml (vendored, 4 changes)

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@v4.2.2

- name: Initialize CodeQL
uses: github/codeql-action/init@v3.27.0
uses: github/codeql-action/init@v3.27.1
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v3.27.0
uses: github/codeql-action/analyze@v3.27.1
with:
category: "/language:python"
.github/workflows/wheels.yml (vendored, 30 changes)

@@ -112,7 +112,7 @@ jobs:
strategy:
fail-fast: false
matrix:
abi: ["cp312"]
abi: ["cp312", "cp313"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository

@@ -135,14 +135,14 @@ jobs:
sed -i "/uv/d" requirements_diff.txt

- name: Build wheels
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
skip-binary: aiohttp;multidict;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"

@@ -156,7 +156,7 @@ jobs:
strategy:
fail-fast: false
matrix:
abi: ["cp312"]
abi: ["cp312", "cp313"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
steps:
- name: Checkout the repository

@@ -198,6 +198,7 @@ jobs:
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt

- name: Create requirements for cython<3
if: matrix.abi == 'cp312'
run: |
# Some dependencies still require 'cython<3'
# and don't yet use isolated build environments.

@@ -208,7 +209,8 @@ jobs:
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt

- name: Build wheels (old cython)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
if: matrix.abi == 'cp312'
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

@@ -223,43 +225,43 @@ jobs:
pip: "'cython<3'"

- name: Build wheels (part 1)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtaa"

- name: Build wheels (part 2)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtab"

- name: Build wheels (part 3)
uses: home-assistant/wheels@2024.07.1
uses: home-assistant/wheels@2024.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
arch: ${{ matrix.arch }}
wheels-key: ${{ secrets.WHEELS_KEY }}
env-file: true
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
constraints: "homeassistant/package_constraints.txt"
requirements-diff: "requirements_diff.txt"
requirements: "requirements_all.txtac"
@@ -1,6 +1,6 @@
repos:
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.7.1
rev: v0.7.2
hooks:
- id: ruff
args:
@@ -124,6 +124,7 @@ homeassistant.components.bryant_evolution.*
homeassistant.components.bthome.*
homeassistant.components.button.*
homeassistant.components.calendar.*
homeassistant.components.cambridge_audio.*
homeassistant.components.camera.*
homeassistant.components.canary.*
homeassistant.components.cert_expiry.*

@@ -208,6 +209,7 @@ homeassistant.components.geo_location.*
homeassistant.components.geocaching.*
homeassistant.components.gios.*
homeassistant.components.glances.*
homeassistant.components.go2rtc.*
homeassistant.components.goalzero.*
homeassistant.components.google.*
homeassistant.components.google_assistant_sdk.*

@@ -322,11 +324,13 @@ homeassistant.components.moon.*
homeassistant.components.mopeka.*
homeassistant.components.motionmount.*
homeassistant.components.mqtt.*
homeassistant.components.music_assistant.*
homeassistant.components.my.*
homeassistant.components.mysensors.*
homeassistant.components.myuplink.*
homeassistant.components.nam.*
homeassistant.components.nanoleaf.*
homeassistant.components.nasweb.*
homeassistant.components.neato.*
homeassistant.components.nest.*
homeassistant.components.netatmo.*

@@ -336,6 +340,7 @@ homeassistant.components.nfandroidtv.*
homeassistant.components.nightscout.*
homeassistant.components.nissan_leaf.*
homeassistant.components.no_ip.*
homeassistant.components.nordpool.*
homeassistant.components.notify.*
homeassistant.components.notion.*
homeassistant.components.number.*
.vscode/settings.default.json (vendored, 10 changes)

@@ -6,5 +6,13 @@
// https://code.visualstudio.com/docs/python/testing#_pytest-configuration-settings
"python.testing.pytestEnabled": false,
// https://code.visualstudio.com/docs/python/linting#_general-settings
"pylint.importStrategy": "fromEnvironment"
"pylint.importStrategy": "fromEnvironment",
"json.schemas": [
{
"fileMatch": [
"homeassistant/components/*/manifest.json"
],
"url": "./script/json_schemas/manifest_schema.json"
}
]
}
CODEOWNERS (20 changes)

@@ -496,8 +496,8 @@ build.json @home-assistant/supervisor
/tests/components/freebox/ @hacf-fr @Quentame
/homeassistant/components/freedompro/ @stefano055415
/tests/components/freedompro/ @stefano055415
/homeassistant/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
/tests/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
/homeassistant/components/fritzbox/ @mib1185 @flabbamann
/tests/components/fritzbox/ @mib1185 @flabbamann
/homeassistant/components/fritzbox_callmonitor/ @cdce8p

@@ -617,8 +617,8 @@ build.json @home-assistant/supervisor
/tests/components/hlk_sw16/ @jameshilliard
/homeassistant/components/holiday/ @jrieger @gjohansson-ST
/tests/components/holiday/ @jrieger @gjohansson-ST
/homeassistant/components/home_connect/ @DavidMStraub
/tests/components/home_connect/ @DavidMStraub
/homeassistant/components/home_connect/ @DavidMStraub @Diegorro98
/tests/components/home_connect/ @DavidMStraub @Diegorro98
/homeassistant/components/homeassistant/ @home-assistant/core
/tests/components/homeassistant/ @home-assistant/core
/homeassistant/components/homeassistant_alerts/ @home-assistant/core

@@ -659,6 +659,8 @@ build.json @home-assistant/supervisor
/tests/components/hunterdouglas_powerview/ @bdraco @kingy444 @trullock
/homeassistant/components/husqvarna_automower/ @Thomas55555
/tests/components/husqvarna_automower/ @Thomas55555
/homeassistant/components/husqvarna_automower_ble/ @alistair23
/tests/components/husqvarna_automower_ble/ @alistair23
/homeassistant/components/huum/ @frwickst
/tests/components/huum/ @frwickst
/homeassistant/components/hvv_departures/ @vigonotion

@@ -819,6 +821,8 @@ build.json @home-assistant/supervisor
/tests/components/lektrico/ @lektrico
/homeassistant/components/lg_netcast/ @Drafteed @splinter98
/tests/components/lg_netcast/ @Drafteed @splinter98
/homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
/tests/components/lg_thinq/ @LG-ThinQ-Integration
/homeassistant/components/lidarr/ @tkdrob
/tests/components/lidarr/ @tkdrob
/homeassistant/components/lifx/ @Djelibeybi

@@ -950,6 +954,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/msteams/ @peroyvind
/homeassistant/components/mullvad/ @meichthys
/tests/components/mullvad/ @meichthys
/homeassistant/components/music_assistant/ @music-assistant
/tests/components/music_assistant/ @music-assistant
/homeassistant/components/mutesync/ @currentoor
/tests/components/mutesync/ @currentoor
/homeassistant/components/my/ @home-assistant/core

@@ -964,6 +970,8 @@ build.json @home-assistant/supervisor
/tests/components/nam/ @bieniu
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
/tests/components/nanoleaf/ @milanmeu @joostlek
/homeassistant/components/nasweb/ @nasWebio
/tests/components/nasweb/ @nasWebio
/homeassistant/components/neato/ @Santobert
/tests/components/neato/ @Santobert
/homeassistant/components/nederlandse_spoorwegen/ @YarmoM

@@ -1004,6 +1012,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/noaa_tides/ @jdelaney72
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
/tests/components/nobo_hub/ @echoromeo @oyvindwe
/homeassistant/components/nordpool/ @gjohansson-ST
/tests/components/nordpool/ @gjohansson-ST
/homeassistant/components/notify/ @home-assistant/core
/tests/components/notify/ @home-assistant/core
/homeassistant/components/notify_events/ @matrozov @papajojo

@@ -1089,6 +1099,8 @@ build.json @home-assistant/supervisor
/tests/components/ovo_energy/ @timmo001
/homeassistant/components/p1_monitor/ @klaasnicolaas
/tests/components/p1_monitor/ @klaasnicolaas
/homeassistant/components/palazzetti/ @dotvav
/tests/components/palazzetti/ @dotvav
/homeassistant/components/panel_custom/ @home-assistant/frontend
/tests/components/panel_custom/ @home-assistant/frontend
/homeassistant/components/peco/ @IceBotYT
@@ -7,12 +7,13 @@ FROM ${BUILD_FROM}
# Synchronize with homeassistant/core.py:async_stop
ENV \
S6_SERVICES_GRACETIME=240000 \
UV_SYSTEM_PYTHON=true
UV_SYSTEM_PYTHON=true \
UV_NO_CACHE=true

ARG QEMU_CPU

# Install uv
RUN pip3 install uv==0.4.22
RUN pip3 install uv==0.5.0

WORKDIR /usr/src

@@ -54,7 +55,7 @@ RUN \
"armv7") go2rtc_suffix='arm' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.4/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.6/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version
@@ -9,6 +9,7 @@ import os
import sys
import threading

from .backup_restore import restore_backup
from .const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__

FAULT_LOG_FILENAME = "home-assistant.log.fault"

@@ -182,6 +183,9 @@ def main() -> int:
return scripts.run(args.script)

config_dir = os.path.abspath(os.path.join(os.getcwd(), args.config))
if restore_backup(config_dir):
return RESTART_EXIT_CODE

ensure_config_path(config_dir)

# pylint: disable-next=import-outside-toplevel
homeassistant/backup_restore.py (new file, 126 lines)

@@ -0,0 +1,126 @@
"""Home Assistant module to handle restoring backups."""

from dataclasses import dataclass
import json
import logging
from pathlib import Path
import shutil
import sys
from tempfile import TemporaryDirectory

from awesomeversion import AwesomeVersion
import securetar

from .const import __version__ as HA_VERSION

RESTORE_BACKUP_FILE = ".HA_RESTORE"
KEEP_PATHS = ("backups",)

_LOGGER = logging.getLogger(__name__)


@dataclass
class RestoreBackupFileContent:
"""Definition for restore backup file content."""

backup_file_path: Path


def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
"""Return the contents of the restore backup file."""
instruction_path = config_dir.joinpath(RESTORE_BACKUP_FILE)
try:
instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
return RestoreBackupFileContent(
backup_file_path=Path(instruction_content["path"])
)
except (FileNotFoundError, json.JSONDecodeError):
return None


def _clear_configuration_directory(config_dir: Path) -> None:
"""Delete all files and directories in the config directory except for the backups directory."""
keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS]
config_contents = sorted(
[entry for entry in config_dir.iterdir() if entry not in keep_paths]
)

for entry in config_contents:
entrypath = config_dir.joinpath(entry)

if entrypath.is_file():
entrypath.unlink()
elif entrypath.is_dir():
shutil.rmtree(entrypath)


def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
"""Extract the backup file to the config directory."""
with (
TemporaryDirectory() as tempdir,
securetar.SecureTarFile(
backup_file_path,
gzip=False,
mode="r",
) as ostf,
):
ostf.extractall(
path=Path(tempdir, "extracted"),
members=securetar.secure_path(ostf),
filter="fully_trusted",
)
backup_meta_file = Path(tempdir, "extracted", "backup.json")
backup_meta = json.loads(backup_meta_file.read_text(encoding="utf8"))

if (
backup_meta_version := AwesomeVersion(
backup_meta["homeassistant"]["version"]
)
) > HA_VERSION:
raise ValueError(
f"You need at least Home Assistant version {backup_meta_version} to restore this backup"
)

with securetar.SecureTarFile(
Path(
tempdir,
"extracted",
f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
),
gzip=backup_meta["compressed"],
mode="r",
) as istf:
for member in istf.getmembers():
if member.name == "data":
continue
member.name = member.name.replace("data/", "")
_clear_configuration_directory(config_dir)
istf.extractall(
path=config_dir,
members=[
member
for member in securetar.secure_path(istf)
if member.name != "data"
],
filter="fully_trusted",
)


def restore_backup(config_dir_path: str) -> bool:
"""Restore the backup file if any.

Returns True if a restore backup file was found and restored, False otherwise.
"""
config_dir = Path(config_dir_path)
if not (restore_content := restore_backup_file_content(config_dir)):
return False

logging.basicConfig(stream=sys.stdout, level=logging.INFO)
backup_file_path = restore_content.backup_file_path
_LOGGER.info("Restoring %s", backup_file_path)
try:
_extract_backup(config_dir, backup_file_path)
except FileNotFoundError as err:
raise ValueError(f"Backup file {backup_file_path} does not exist") from err
_LOGGER.info("Restore complete, restarting")
return True
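Together with the `__main__.py` change above, the new module defines a simple trigger mechanism: a JSON file named `.HA_RESTORE` in the configuration directory points at the backup archive to restore. A minimal sketch of how that trigger could be written and then consumed at startup; the `/config` directory and the backup file name are hypothetical examples, not taken from this diff:

```python
# Sketch only: write the restore instruction file that backup_restore.py
# looks for on startup, then run the restore step the way __main__.py does.
import json
from pathlib import Path

from homeassistant.backup_restore import RESTORE_BACKUP_FILE, restore_backup

config_dir = Path("/config")  # assumption: a typical Home Assistant config dir
backup_path = config_dir / "backups" / "example_backup.tar"  # hypothetical backup

# The instruction file contains the path of the backup archive to restore.
(config_dir / RESTORE_BACKUP_FILE).write_text(
    json.dumps({"path": str(backup_path)}), encoding="utf-8"
)

# On the next start, __main__.py calls restore_backup(); it returns True after
# extracting the backup, and Home Assistant exits with RESTART_EXIT_CODE.
if restore_backup(str(config_dir)):
    print("Backup restored; Home Assistant would now restart")
```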
homeassistant/brands/husqvarna.json (new file, 5 lines)

@@ -0,0 +1,5 @@
{
"domain": "husqvarna",
"name": "Husqvarna",
"integrations": ["husqvarna_automower", "husqvarna_automower_ble"]
}
@@ -1,5 +1,5 @@
{
"domain": "lg",
"name": "LG",
"integrations": ["lg_netcast", "lg_soundbar", "webostv"]
"integrations": ["lg_netcast", "lg_soundbar", "lg_thinq", "webostv"]
}
@@ -7,7 +7,6 @@ from typing import Any
from adguardhome import AdGuardHome, AdGuardHomeConnectionError
import voluptuous as vol

from homeassistant.components.hassio import HassioServiceInfo
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import (
CONF_HOST,

@@ -18,6 +17,7 @@ from homeassistant.const import (
CONF_VERIFY_SSL,
)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.hassio import HassioServiceInfo

from .const import DOMAIN
@@ -55,6 +55,7 @@ async def async_setup_entry(
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
config_entry=entry,
name="Advantage Air",
update_method=async_get,
update_interval=timedelta(seconds=ADVANTAGE_AIR_SYNC_INTERVAL),
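The Advantage Air hunk above, and the Airthings, AEMET, and AirVisual hunks below, all make the same change: the owning config entry is now passed to `DataUpdateCoordinator` through the `config_entry` keyword. A hedged sketch of the general pattern; the integration name, empty update method, and 60-second interval are placeholders and not taken from any of these integrations:

```python
# Sketch of the coordinator pattern used in these diffs: the config entry is
# handed to DataUpdateCoordinator explicitly via config_entry=entry.
from datetime import timedelta
import logging

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Set up a hypothetical integration from a config entry."""

    async def _async_update_data() -> dict:
        # Placeholder fetch; a real integration would call its client here.
        return {}

    coordinator = DataUpdateCoordinator(
        hass,
        _LOGGER,
        config_entry=entry,  # explicit link to the owning entry
        name="example",
        update_method=_async_update_data,
        update_interval=timedelta(seconds=60),
    )
    await coordinator.async_config_entry_first_refresh()
    return True
```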
@@ -1,6 +1,5 @@
"""The AEMET OpenData component."""

from dataclasses import dataclass
import logging

from aemet_opendata.exceptions import AemetError, TownNotFound

@@ -13,20 +12,10 @@ from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client

from .const import CONF_STATION_UPDATES, PLATFORMS
from .coordinator import WeatherUpdateCoordinator
from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

type AemetConfigEntry = ConfigEntry[AemetData]


@dataclass
class AemetData:
"""Aemet runtime data."""

name: str
coordinator: WeatherUpdateCoordinator


async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> bool:
"""Set up AEMET OpenData as config entry."""

@@ -46,7 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> boo
except AemetError as err:
raise ConfigEntryNotReady(err) from err

weather_coordinator = WeatherUpdateCoordinator(hass, aemet)
weather_coordinator = WeatherUpdateCoordinator(hass, entry, aemet)
await weather_coordinator.async_config_entry_first_refresh()

entry.runtime_data = AemetData(name=name, coordinator=weather_coordinator)
@@ -3,6 +3,7 @@
from __future__ import annotations

from asyncio import timeout
from dataclasses import dataclass
from datetime import timedelta
import logging
from typing import Any, Final, cast

@@ -19,6 +20,7 @@ from aemet_opendata.helpers import dict_nested_value
from aemet_opendata.interface import AEMET

from homeassistant.components.weather import Forecast
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -29,6 +31,16 @@ _LOGGER = logging.getLogger(__name__)
API_TIMEOUT: Final[int] = 120
WEATHER_UPDATE_INTERVAL = timedelta(minutes=10)

type AemetConfigEntry = ConfigEntry[AemetData]


@dataclass
class AemetData:
"""Aemet runtime data."""

name: str
coordinator: WeatherUpdateCoordinator


class WeatherUpdateCoordinator(DataUpdateCoordinator):
"""Weather data update coordinator."""

@@ -36,6 +48,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
def __init__(
self,
hass: HomeAssistant,
entry: AemetConfigEntry,
aemet: AEMET,
) -> None:
"""Initialize coordinator."""

@@ -44,6 +57,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
super().__init__(
hass,
_LOGGER,
config_entry=entry,
name=DOMAIN,
update_interval=WEATHER_UPDATE_INTERVAL,
)
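The `AemetConfigEntry` alias and the `AemetData` runtime-data container now live in `coordinator.py`, so the diagnostics, sensor, and weather modules import them from `.coordinator` rather than from the package `__init__`. A rough sketch of how a typed entry like this is consumed once `runtime_data` has been set in `async_setup_entry`; the diagnostics payload shown is illustrative and not part of the AEMET diff:

```python
# Illustrative only: reading the typed runtime data attached to an
# AemetConfigEntry; the returned payload is a made-up example.
from homeassistant.core import HomeAssistant

from .coordinator import AemetConfigEntry


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: AemetConfigEntry
) -> dict:
    """Return a hypothetical diagnostics payload."""
    aemet_data = entry.runtime_data        # typed as AemetData
    coordinator = aemet_data.coordinator   # the WeatherUpdateCoordinator
    return {"name": aemet_data.name, "data": coordinator.data}
```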
@@ -15,7 +15,7 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant

from . import AemetConfigEntry
from .coordinator import AemetConfigEntry

TO_REDACT_CONFIG = [
CONF_API_KEY,
@@ -55,7 +55,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.util import dt as dt_util

from . import AemetConfigEntry
from .const import (
ATTR_API_CONDITION,
ATTR_API_FORECAST_CONDITION,

@@ -87,7 +86,7 @@ from .const import (
ATTR_API_WIND_SPEED,
CONDITIONS_MAP,
)
from .coordinator import WeatherUpdateCoordinator
from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
from .entity import AemetEntity

@@ -249,6 +248,7 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
name="Rain",
native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
state_class=SensorStateClass.MEASUREMENT,
),
AemetSensorEntityDescription(
key=ATTR_API_RAIN_PROB,

@@ -263,6 +263,7 @@ WEATHER_SENSORS: Final[tuple[AemetSensorEntityDescription, ...]] = (
name="Snow",
native_unit_of_measurement=UnitOfVolumetricFlux.MILLIMETERS_PER_HOUR,
device_class=SensorDeviceClass.PRECIPITATION_INTENSITY,
state_class=SensorStateClass.MEASUREMENT,
),
AemetSensorEntityDescription(
key=ATTR_API_SNOW_PROB,
@@ -27,9 +27,8 @@ from homeassistant.const import (
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import AemetConfigEntry
from .const import CONDITIONS_MAP
from .coordinator import WeatherUpdateCoordinator
from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
from .entity import AemetEntity
@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/agent_dvr",
"iot_class": "local_polling",
"loggers": ["agent"],
"requirements": ["agent-py==0.0.23"]
"requirements": ["agent-py==0.0.24"]
}
@@ -1,5 +1,7 @@
"""Config flow for AirNow integration."""

from __future__ import annotations

import logging
from typing import Any

@@ -12,7 +14,6 @@ from homeassistant.config_entries import (
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
OptionsFlowWithConfigEntry,
)
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
from homeassistant.core import HomeAssistant, callback

@@ -120,12 +121,12 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN):
@callback
def async_get_options_flow(
config_entry: ConfigEntry,
) -> OptionsFlow:
) -> AirNowOptionsFlowHandler:
"""Return the options flow."""
return AirNowOptionsFlowHandler(config_entry)
return AirNowOptionsFlowHandler()


class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry):
class AirNowOptionsFlowHandler(OptionsFlow):
"""Handle an options flow for AirNow."""

async def async_step_init(

@@ -136,12 +137,7 @@ class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry):
return self.async_create_entry(data=user_input)

options_schema = vol.Schema(
{
vol.Optional(CONF_RADIUS): vol.All(
int,
vol.Range(min=5),
),
}
{vol.Optional(CONF_RADIUS): vol.All(int, vol.Range(min=5))}
)

return self.async_show_form(
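The AirNow options flow drops the `OptionsFlowWithConfigEntry` base class: the handler now derives from plain `OptionsFlow`, and `async_get_options_flow` returns it without passing the config entry to the constructor. A hedged sketch of the resulting handler shape; the class name is a placeholder and the schema mirrors the simplified one in the diff:

```python
# Hypothetical handler following the pattern in the AirNow diff: the options
# flow derives from OptionsFlow and no longer receives the config entry in
# its constructor (async_get_options_flow simply returns Handler()).
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlowResult, OptionsFlow
from homeassistant.const import CONF_RADIUS


class ExampleOptionsFlowHandler(OptionsFlow):
    """Handle an options flow (illustrative placeholder)."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Manage the options."""
        if user_input is not None:
            return self.async_create_entry(data=user_input)

        options_schema = vol.Schema(
            {vol.Optional(CONF_RADIUS): vol.All(int, vol.Range(min=5))}
        )
        return self.async_show_form(step_id="init", data_schema=options_schema)
```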
@@ -42,6 +42,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) ->
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
config_entry=entry,
name=DOMAIN,
update_method=_update_method,
update_interval=SCAN_INTERVAL,
@@ -2,75 +2,27 @@

from __future__ import annotations

from datetime import timedelta
import logging

from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice
from bleak_retry_connector import close_stale_connections_by_address

from homeassistant.components import bluetooth
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util.unit_system import METRIC_SYSTEM

from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, MAX_RETRIES_AFTER_STARTUP
from .const import MAX_RETRIES_AFTER_STARTUP
from .coordinator import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator

PLATFORMS: list[Platform] = [Platform.SENSOR]

_LOGGER = logging.getLogger(__name__)

AirthingsBLEDataUpdateCoordinator = DataUpdateCoordinator[AirthingsDevice]
AirthingsBLEConfigEntry = ConfigEntry[AirthingsBLEDataUpdateCoordinator]


async def async_setup_entry(
hass: HomeAssistant, entry: AirthingsBLEConfigEntry
) -> bool:
"""Set up Airthings BLE device from a config entry."""
hass.data.setdefault(DOMAIN, {})
address = entry.unique_id

is_metric = hass.config.units is METRIC_SYSTEM
assert address is not None

await close_stale_connections_by_address(address)

ble_device = bluetooth.async_ble_device_from_address(hass, address)

if not ble_device:
raise ConfigEntryNotReady(
f"Could not find Airthings device with address {address}"
)

airthings = AirthingsBluetoothDeviceData(_LOGGER, is_metric)

async def _async_update_method() -> AirthingsDevice:
"""Get data from Airthings BLE."""
try:
data = await airthings.update_device(ble_device)
except Exception as err:
raise UpdateFailed(f"Unable to fetch data: {err}") from err

return data

coordinator: AirthingsBLEDataUpdateCoordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=DOMAIN,
update_method=_async_update_method,
update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
)

coordinator = AirthingsBLEDataUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()

# Once its setup and we know we are not going to delay
# the startup of Home Assistant, we can set the max attempts
# to a higher value. If the first connection attempt fails,
# Home Assistant's built-in retry logic will take over.
airthings.set_max_attempts(MAX_RETRIES_AFTER_STARTUP)
coordinator.airthings.set_max_attempts(MAX_RETRIES_AFTER_STARTUP)

entry.runtime_data = coordinator
68
homeassistant/components/airthings_ble/coordinator.py
Normal file
68
homeassistant/components/airthings_ble/coordinator.py
Normal file
@@ -0,0 +1,68 @@
|
||||
"""The Airthings BLE integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from airthings_ble import AirthingsBluetoothDeviceData, AirthingsDevice
|
||||
from bleak.backends.device import BLEDevice
|
||||
from bleak_retry_connector import close_stale_connections_by_address
|
||||
|
||||
from homeassistant.components import bluetooth
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type AirthingsBLEConfigEntry = ConfigEntry[AirthingsBLEDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AirthingsBLEDataUpdateCoordinator(DataUpdateCoordinator[AirthingsDevice]):
|
||||
"""Class to manage fetching Airthings BLE data."""
|
||||
|
||||
ble_device: BLEDevice
|
||||
config_entry: AirthingsBLEConfigEntry
|
||||
|
||||
def __init__(self, hass: HomeAssistant, entry: AirthingsBLEConfigEntry) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
self.airthings = AirthingsBluetoothDeviceData(
|
||||
_LOGGER, hass.config.units is METRIC_SYSTEM
|
||||
)
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=DOMAIN,
|
||||
update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
|
||||
)
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
"""Set up the coordinator."""
|
||||
address = self.config_entry.unique_id
|
||||
|
||||
assert address is not None
|
||||
|
||||
await close_stale_connections_by_address(address)
|
||||
|
||||
ble_device = bluetooth.async_ble_device_from_address(self.hass, address)
|
||||
|
||||
if not ble_device:
|
||||
raise ConfigEntryNotReady(
|
||||
f"Could not find Airthings device with address {address}"
|
||||
)
|
||||
self.ble_device = ble_device
|
||||
|
||||
async def _async_update_data(self) -> AirthingsDevice:
|
||||
"""Get data from Airthings BLE."""
|
||||
try:
|
||||
data = await self.airthings.update_device(self.ble_device)
|
||||
except Exception as err:
|
||||
raise UpdateFailed(f"Unable to fetch data: {err}") from err
|
||||
|
||||
return data
|
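For orientation, here is a minimal sketch (not part of the diff; entity and attribute names are illustrative) of how a platform would consume this coordinator through entry.runtime_data once the hass.data[DOMAIN] lookup goes away:

# Illustrative sketch only: a CoordinatorEntity reading data from
# AirthingsBLEDataUpdateCoordinator via entry.runtime_data.
from homeassistant.helpers.update_coordinator import CoordinatorEntity


class ExampleAirthingsSensor(CoordinatorEntity[AirthingsBLEDataUpdateCoordinator]):
    """Hypothetical sensor bound to the coordinator defined above."""

    @property
    def native_value(self):
        # coordinator.data is the AirthingsDevice returned by _async_update_data;
        # the airthings_ble library exposes readings on its `sensors` mapping.
        return self.coordinator.data.sensors.get("temperature")


async def async_setup_entry(hass, entry: AirthingsBLEConfigEntry, async_add_entities):
    """Sketch of a sensor platform setup using runtime_data."""
    async_add_entities([ExampleAirthingsSensor(entry.runtime_data)])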
@@ -24,5 +24,5 @@
  "dependencies": ["bluetooth_adapters"],
  "documentation": "https://www.home-assistant.io/integrations/airthings_ble",
  "iot_class": "local_polling",
  "requirements": ["airthings-ble==0.9.1"]
  "requirements": ["airthings-ble==0.9.2"]
}
@@ -34,8 +34,8 @@ from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util.unit_system import METRIC_SYSTEM

from . import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator
from .const import DOMAIN, VOLUME_BECQUEREL, VOLUME_PICOCURIE
from .coordinator import AirthingsBLEConfigEntry, AirthingsBLEDataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)
@@ -9,8 +9,6 @@ from homeassistant.const import CONF_HOST, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady

from .const import DOMAIN

PLATFORMS: list[Platform] = [Platform.CLIMATE, Platform.COVER]

type Airtouch5ConfigEntry = ConfigEntry[Airtouch5SimpleClient]
@@ -19,8 +17,6 @@ type Airtouch5ConfigEntry = ConfigEntry[Airtouch5SimpleClient]
async def async_setup_entry(hass: HomeAssistant, entry: Airtouch5ConfigEntry) -> bool:
    """Set up Airtouch 5 from a config entry."""

    hass.data.setdefault(DOMAIN, {})

    # Create API instance
    host = entry.data[CONF_HOST]
    client = Airtouch5SimpleClient(host)
@@ -204,6 +204,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirVisualConfigEntry) ->
    coordinator = DataUpdateCoordinator(
        hass,
        LOGGER,
        config_entry=entry,
        name=async_get_geography_id(entry.data),
        # We give a placeholder update interval in order to create the coordinator;
        # then, below, we use the coordinator's presence (along with any other
@@ -81,6 +81,7 @@ async def async_setup_entry(
    coordinator = DataUpdateCoordinator(
        hass,
        LOGGER,
        config_entry=entry,
        name="Node/Pro data",
        update_interval=UPDATE_INTERVAL,
        update_method=async_get_data,
@@ -310,6 +310,10 @@ class AirzoneDeviceClimate(AirzoneClimate):

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature."""
        hvac_mode = kwargs.get(ATTR_HVAC_MODE)
        if hvac_mode is not None:
            await self.async_set_hvac_mode(hvac_mode)

        params: dict[str, Any] = {}
        if ATTR_TEMPERATURE in kwargs:
            params[API_SETPOINT] = {
@@ -333,9 +337,6 @@ class AirzoneDeviceClimate(AirzoneClimate):
            }
        await self._async_update_params(params)

        if ATTR_HVAC_MODE in kwargs:
            await self.async_set_hvac_mode(kwargs[ATTR_HVAC_MODE])


class AirzoneDeviceGroupClimate(AirzoneClimate):
    """Define an Airzone Cloud DeviceGroup base class."""
@@ -366,6 +367,10 @@ class AirzoneDeviceGroupClimate(AirzoneClimate):

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature."""
        hvac_mode = kwargs.get(ATTR_HVAC_MODE)
        if hvac_mode is not None:
            await self.async_set_hvac_mode(hvac_mode)

        params: dict[str, Any] = {}
        if ATTR_TEMPERATURE in kwargs:
            params[API_PARAMS] = {
@@ -376,9 +381,6 @@ class AirzoneDeviceGroupClimate(AirzoneClimate):
            }
        await self._async_update_params(params)

        if ATTR_HVAC_MODE in kwargs:
            await self.async_set_hvac_mode(kwargs[ATTR_HVAC_MODE])

    async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
        """Set hvac mode."""
        params: dict[str, Any] = {
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/airzone_cloud",
  "iot_class": "cloud_push",
  "loggers": ["aioairzone_cloud"],
  "requirements": ["aioairzone-cloud==0.6.8"]
  "requirements": ["aioairzone-cloud==0.6.10"]
}
@@ -1083,7 +1083,13 @@ async def async_api_arm(
    arm_state = directive.payload["armState"]
    data: dict[str, Any] = {ATTR_ENTITY_ID: entity.entity_id}

    if entity.state != alarm_control_panel.AlarmControlPanelState.DISARMED:
    # Per Alexa Documentation: users are not allowed to switch from armed_away
    # directly to another armed state without first disarming the system.
    # https://developer.amazon.com/en-US/docs/alexa/device-apis/alexa-securitypanelcontroller.html#arming
    if (
        entity.state == alarm_control_panel.AlarmControlPanelState.ARMED_AWAY
        and arm_state != "ARMED_AWAY"
    ):
        msg = "You must disarm the system before you can set the requested arm state."
        raise AlexaSecurityPanelAuthorizationRequired(msg)
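As a quick illustration of the narrowed guard above (values are hypothetical, not taken from the integration): only a panel that is currently armed_away now refuses a request for a different armed state without disarming first.

# Hypothetical values walking through the condition used above.
current_state = "armed_away"        # stands in for entity.state
requested_arm_state = "ARMED_STAY"  # stands in for directive.payload["armState"]

requires_disarm_first = (
    current_state == "armed_away" and requested_arm_state != "ARMED_AWAY"
)
# True here, so the handler would raise AlexaSecurityPanelAuthorizationRequired.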
@@ -29,6 +29,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.entity_registry as er
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.storage import Store
from homeassistant.helpers.system_info import async_get_system_info
from homeassistant.loader import (
@@ -136,7 +137,7 @@ class Analytics:
    @property
    def supervisor(self) -> bool:
        """Return bool if a supervisor is present."""
        return hassio.is_hassio(self.hass)
        return is_hassio(self.hass)

    async def load(self) -> None:
        """Load preferences."""
@@ -1,7 +1,7 @@
{
  "domain": "analytics",
  "name": "Analytics",
  "after_dependencies": ["energy", "recorder"],
  "after_dependencies": ["energy", "hassio", "recorder"],
  "codeowners": ["@home-assistant/core", "@ludeeus"],
  "dependencies": ["api", "websocket_api"],
  "documentation": "https://www.home-assistant.io/integrations/analytics",
@@ -16,7 +16,6 @@ from homeassistant.config_entries import (
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlow,
    OptionsFlowWithConfigEntry,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
@@ -27,6 +26,7 @@ from homeassistant.helpers.selector import (
)

from .const import (
    CONF_TRACKED_ADDONS,
    CONF_TRACKED_CUSTOM_INTEGRATIONS,
    CONF_TRACKED_INTEGRATIONS,
    DOMAIN,
@@ -45,9 +45,11 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):

    @staticmethod
    @callback
    def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
    def async_get_options_flow(
        config_entry: ConfigEntry,
    ) -> HomeassistantAnalyticsOptionsFlowHandler:
        """Get the options flow for this handler."""
        return HomeassistantAnalyticsOptionsFlowHandler(config_entry)
        return HomeassistantAnalyticsOptionsFlowHandler()

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
@@ -55,8 +57,12 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            if not user_input.get(CONF_TRACKED_INTEGRATIONS) and not user_input.get(
                CONF_TRACKED_CUSTOM_INTEGRATIONS
            if all(
                [
                    not user_input.get(CONF_TRACKED_ADDONS),
                    not user_input.get(CONF_TRACKED_INTEGRATIONS),
                    not user_input.get(CONF_TRACKED_CUSTOM_INTEGRATIONS),
                ]
            ):
                errors["base"] = "no_integrations_selected"
            else:
@@ -64,6 +70,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
                    title="Home Assistant Analytics Insights",
                    data={},
                    options={
                        CONF_TRACKED_ADDONS: user_input.get(CONF_TRACKED_ADDONS, []),
                        CONF_TRACKED_INTEGRATIONS: user_input.get(
                            CONF_TRACKED_INTEGRATIONS, []
                        ),
@@ -77,6 +84,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
            session=async_get_clientsession(self.hass)
        )
        try:
            addons = await client.get_addons()
            integrations = await client.get_integrations()
            custom_integrations = await client.get_custom_integrations()
        except HomeassistantAnalyticsConnectionError:
@@ -99,6 +107,13 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
            errors=errors,
            data_schema=vol.Schema(
                {
                    vol.Optional(CONF_TRACKED_ADDONS): SelectSelector(
                        SelectSelectorConfig(
                            options=list(addons),
                            multiple=True,
                            sort=True,
                        )
                    ),
                    vol.Optional(CONF_TRACKED_INTEGRATIONS): SelectSelector(
                        SelectSelectorConfig(
                            options=options,
@@ -118,7 +133,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
        )


class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry):
class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow):
    """Handle Homeassistant Analytics options."""

    async def async_step_init(
@@ -127,14 +142,19 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry):
        """Manage the options."""
        errors: dict[str, str] = {}
        if user_input is not None:
            if not user_input.get(CONF_TRACKED_INTEGRATIONS) and not user_input.get(
                CONF_TRACKED_CUSTOM_INTEGRATIONS
            if all(
                [
                    not user_input.get(CONF_TRACKED_ADDONS),
                    not user_input.get(CONF_TRACKED_INTEGRATIONS),
                    not user_input.get(CONF_TRACKED_CUSTOM_INTEGRATIONS),
                ]
            ):
                errors["base"] = "no_integrations_selected"
            else:
                return self.async_create_entry(
                    title="",
                    data={
                        CONF_TRACKED_ADDONS: user_input.get(CONF_TRACKED_ADDONS, []),
                        CONF_TRACKED_INTEGRATIONS: user_input.get(
                            CONF_TRACKED_INTEGRATIONS, []
                        ),
@@ -148,6 +168,7 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry):
            session=async_get_clientsession(self.hass)
        )
        try:
            addons = await client.get_addons()
            integrations = await client.get_integrations()
            custom_integrations = await client.get_custom_integrations()
        except HomeassistantAnalyticsConnectionError:
@@ -168,6 +189,13 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry):
            data_schema=self.add_suggested_values_to_schema(
                vol.Schema(
                    {
                        vol.Optional(CONF_TRACKED_ADDONS): SelectSelector(
                            SelectSelectorConfig(
                                options=list(addons),
                                multiple=True,
                                sort=True,
                            )
                        ),
                        vol.Optional(CONF_TRACKED_INTEGRATIONS): SelectSelector(
                            SelectSelectorConfig(
                                options=options,
@@ -184,6 +212,6 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry):
                    ),
                },
                ),
                self.options,
                self.config_entry.options,
            ),
        )
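This file, like several others in this compare (Android Debug Bridge, Android TV Remote, Axis), moves its handler from OptionsFlowWithConfigEntry to plain OptionsFlow. A minimal sketch of the resulting pattern, assuming a Home Assistant version where OptionsFlow supplies self.config_entry itself (class and domain names here are illustrative, not from any of these integrations):

# Sketch only: with OptionsFlow, the framework provides self.config_entry, so the
# handler is constructed without arguments and reads options from the entry directly.
from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow
from homeassistant.core import callback


class ExampleOptionsFlowHandler(OptionsFlow):
    """Hypothetical options flow that no longer stores the config entry itself."""

    async def async_step_init(self, user_input=None):
        if user_input is not None:
            # Merge with the current options instead of mutating a stored copy.
            return self.async_create_entry(data=self.config_entry.options | user_input)
        return self.async_show_form(step_id="init")


class ExampleConfigFlow(ConfigFlow, domain="example"):
    """Hypothetical config flow returning the options flow without arguments."""

    @staticmethod
    @callback
    def async_get_options_flow(config_entry: ConfigEntry) -> ExampleOptionsFlowHandler:
        return ExampleOptionsFlowHandler()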
@@ -4,6 +4,7 @@ import logging

DOMAIN = "analytics_insights"

CONF_TRACKED_ADDONS = "tracked_addons"
CONF_TRACKED_INTEGRATIONS = "tracked_integrations"
CONF_TRACKED_CUSTOM_INTEGRATIONS = "tracked_custom_integrations"
@@ -12,11 +12,13 @@ from python_homeassistant_analytics import (
    HomeassistantAnalyticsConnectionError,
    HomeassistantAnalyticsNotModifiedError,
)
from python_homeassistant_analytics.models import Addon

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import (
    CONF_TRACKED_ADDONS,
    CONF_TRACKED_CUSTOM_INTEGRATIONS,
    CONF_TRACKED_INTEGRATIONS,
    DOMAIN,
@@ -33,6 +35,7 @@ class AnalyticsData:

    active_installations: int
    reports_integrations: int
    addons: dict[str, int]
    core_integrations: dict[str, int]
    custom_integrations: dict[str, int]

@@ -53,6 +56,7 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic
            update_interval=timedelta(hours=12),
        )
        self._client = client
        self._tracked_addons = self.config_entry.options.get(CONF_TRACKED_ADDONS, [])
        self._tracked_integrations = self.config_entry.options[
            CONF_TRACKED_INTEGRATIONS
        ]
@@ -62,6 +66,7 @@ class HomeassistantAnalyticsDataUpdateCoordinator(DataUpdateCoordinator[Analytic

    async def _async_update_data(self) -> AnalyticsData:
        try:
            addons_data = await self._client.get_addons()
            data = await self._client.get_current_analytics()
            custom_data = await self._client.get_custom_integrations()
        except HomeassistantAnalyticsConnectionError as err:
@@ -70,6 +75,9 @@
            ) from err
        except HomeassistantAnalyticsNotModifiedError:
            return self.data
        addons = {
            addon: get_addon_value(addons_data, addon) for addon in self._tracked_addons
        }
        core_integrations = {
            integration: data.integrations.get(integration, 0)
            for integration in self._tracked_integrations
@@ -81,11 +89,19 @@
        return AnalyticsData(
            data.active_installations,
            data.reports_integrations,
            addons,
            core_integrations,
            custom_integrations,
        )


def get_addon_value(data: dict[str, Addon], name_slug: str) -> int:
    """Get addon value."""
    if name_slug in data:
        return data[name_slug].total
    return 0


def get_custom_integration_value(
    data: dict[str, CustomIntegration], domain: str
) -> int:
@@ -29,6 +29,20 @@ class AnalyticsSensorEntityDescription(SensorEntityDescription):
    value_fn: Callable[[AnalyticsData], StateType]


def get_addon_entity_description(
    name_slug: str,
) -> AnalyticsSensorEntityDescription:
    """Get addon entity description."""
    return AnalyticsSensorEntityDescription(
        key=f"addon_{name_slug}_active_installations",
        translation_key="addons",
        name=name_slug,
        state_class=SensorStateClass.TOTAL,
        native_unit_of_measurement="active installations",
        value_fn=lambda data: data.addons.get(name_slug),
    )


def get_core_integration_entity_description(
    domain: str, name: str
) -> AnalyticsSensorEntityDescription:
@@ -89,6 +103,13 @@ async def async_setup_entry(
        analytics_data.coordinator
    )
    entities: list[HomeassistantAnalyticsSensor] = []
    entities.extend(
        HomeassistantAnalyticsSensor(
            coordinator,
            get_addon_entity_description(addon_name_slug),
        )
        for addon_name_slug in coordinator.data.addons
    )
    entities.extend(
        HomeassistantAnalyticsSensor(
            coordinator,
@@ -3,10 +3,12 @@
    "step": {
      "user": {
        "data": {
          "tracked_addons": "Addons",
          "tracked_integrations": "Integrations",
          "tracked_custom_integrations": "Custom integrations"
        },
        "data_description": {
          "tracked_addons": "Select the addons you want to track",
          "tracked_integrations": "Select the integrations you want to track",
          "tracked_custom_integrations": "Select the custom integrations you want to track"
        }
@@ -24,10 +26,12 @@
    "step": {
      "init": {
        "data": {
          "tracked_addons": "[%key:component::analytics_insights::config::step::user::data::tracked_addons%]",
          "tracked_integrations": "[%key:component::analytics_insights::config::step::user::data::tracked_integrations%]",
          "tracked_custom_integrations": "[%key:component::analytics_insights::config::step::user::data::tracked_custom_integrations%]"
        },
        "data_description": {
          "tracked_addons": "[%key:component::analytics_insights::config::step::user::data_description::tracked_addons%]",
          "tracked_integrations": "[%key:component::analytics_insights::config::step::user::data_description::tracked_integrations%]",
          "tracked_custom_integrations": "[%key:component::analytics_insights::config::step::user::data_description::tracked_custom_integrations%]"
        }
@@ -4,6 +4,7 @@ from __future__ import annotations

from collections.abc import Mapping
from dataclasses import dataclass
import logging
import os
from typing import Any

@@ -40,6 +41,7 @@ from .const import (
    CONF_ADB_SERVER_IP,
    CONF_ADB_SERVER_PORT,
    CONF_ADBKEY,
    CONF_SCREENCAP_INTERVAL,
    CONF_STATE_DETECTION_RULES,
    DEFAULT_ADB_SERVER_PORT,
    DEVICE_ANDROIDTV,
@@ -66,6 +68,8 @@ RELOAD_OPTIONS = [CONF_STATE_DETECTION_RULES]

_INVALID_MACS = {"ff:ff:ff:ff:ff:ff"}

_LOGGER = logging.getLogger(__name__)


@dataclass
class AndroidTVRuntimeData:
@@ -157,6 +161,32 @@ async def async_connect_androidtv(
    return aftv, None


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Migrate old entry."""
    _LOGGER.debug(
        "Migrating configuration from version %s.%s", entry.version, entry.minor_version
    )

    if entry.version == 1:
        new_options = {**entry.options}

        # Migrate MinorVersion 1 -> MinorVersion 2: New option
        if entry.minor_version < 2:
            new_options = {**new_options, CONF_SCREENCAP_INTERVAL: 0}

        hass.config_entries.async_update_entry(
            entry, options=new_options, minor_version=2, version=1
        )

    _LOGGER.debug(
        "Migration to configuration version %s.%s successful",
        entry.version,
        entry.minor_version,
    )

    return True


async def async_setup_entry(hass: HomeAssistant, entry: AndroidTVConfigEntry) -> bool:
    """Set up Android Debug Bridge platform."""
@@ -13,7 +13,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithConfigEntry,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_DEVICE_CLASS, CONF_HOST, CONF_PORT
|
||||
from homeassistant.core import callback
|
||||
@@ -34,7 +34,7 @@ from .const import (
|
||||
CONF_APPS,
|
||||
CONF_EXCLUDE_UNNAMED_APPS,
|
||||
CONF_GET_SOURCES,
|
||||
CONF_SCREENCAP,
|
||||
CONF_SCREENCAP_INTERVAL,
|
||||
CONF_STATE_DETECTION_RULES,
|
||||
CONF_TURN_OFF_COMMAND,
|
||||
CONF_TURN_ON_COMMAND,
|
||||
@@ -43,7 +43,7 @@ from .const import (
|
||||
DEFAULT_EXCLUDE_UNNAMED_APPS,
|
||||
DEFAULT_GET_SOURCES,
|
||||
DEFAULT_PORT,
|
||||
DEFAULT_SCREENCAP,
|
||||
DEFAULT_SCREENCAP_INTERVAL,
|
||||
DEVICE_CLASSES,
|
||||
DOMAIN,
|
||||
PROP_ETHMAC,
|
||||
@@ -76,6 +76,7 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
|
||||
@callback
|
||||
def _show_setup_form(
|
||||
@@ -185,16 +186,14 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
return OptionsFlowHandler(config_entry)
|
||||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class OptionsFlowHandler(OptionsFlow):
|
||||
"""Handle an option flow for Android Debug Bridge."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
super().__init__(config_entry)
|
||||
|
||||
self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {})
|
||||
self._state_det_rules: dict[str, Any] = self.options.setdefault(
|
||||
CONF_STATE_DETECTION_RULES, {}
|
||||
self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {}))
|
||||
self._state_det_rules: dict[str, Any] = dict(
|
||||
config_entry.options.get(CONF_STATE_DETECTION_RULES, {})
|
||||
)
|
||||
self._conf_app_id: str | None = None
|
||||
self._conf_rule_id: str | None = None
|
||||
@@ -236,7 +235,7 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
SelectOptionDict(value=k, label=v) for k, v in apps_list.items()
|
||||
]
|
||||
rules = [RULES_NEW_ID, *self._state_det_rules]
|
||||
options = self.options
|
||||
options = self.config_entry.options
|
||||
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
@@ -253,10 +252,12 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
CONF_EXCLUDE_UNNAMED_APPS, DEFAULT_EXCLUDE_UNNAMED_APPS
|
||||
),
|
||||
): bool,
|
||||
vol.Optional(
|
||||
CONF_SCREENCAP,
|
||||
default=options.get(CONF_SCREENCAP, DEFAULT_SCREENCAP),
|
||||
): bool,
|
||||
vol.Required(
|
||||
CONF_SCREENCAP_INTERVAL,
|
||||
default=options.get(
|
||||
CONF_SCREENCAP_INTERVAL, DEFAULT_SCREENCAP_INTERVAL
|
||||
),
|
||||
): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=15)),
|
||||
vol.Optional(
|
||||
CONF_TURN_OFF_COMMAND,
|
||||
description={
|
||||
|
@@ -9,6 +9,7 @@ CONF_APPS = "apps"
|
||||
CONF_EXCLUDE_UNNAMED_APPS = "exclude_unnamed_apps"
|
||||
CONF_GET_SOURCES = "get_sources"
|
||||
CONF_SCREENCAP = "screencap"
|
||||
CONF_SCREENCAP_INTERVAL = "screencap_interval"
|
||||
CONF_STATE_DETECTION_RULES = "state_detection_rules"
|
||||
CONF_TURN_OFF_COMMAND = "turn_off_command"
|
||||
CONF_TURN_ON_COMMAND = "turn_on_command"
|
||||
@@ -18,7 +19,7 @@ DEFAULT_DEVICE_CLASS = "auto"
|
||||
DEFAULT_EXCLUDE_UNNAMED_APPS = False
|
||||
DEFAULT_GET_SOURCES = True
|
||||
DEFAULT_PORT = 5555
|
||||
DEFAULT_SCREENCAP = True
|
||||
DEFAULT_SCREENCAP_INTERVAL = 5
|
||||
|
||||
DEVICE_ANDROIDTV = "androidtv"
|
||||
DEVICE_FIRETV = "firetv"
|
||||
|
@@ -2,10 +2,9 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from datetime import datetime, timedelta
|
||||
import hashlib
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from androidtv.constants import APPS, KEYS
|
||||
from androidtv.setup_async import AndroidTVAsync, FireTVAsync
|
||||
@@ -23,19 +22,19 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv, entity_platform
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util import Throttle
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from . import AndroidTVConfigEntry
|
||||
from .const import (
|
||||
CONF_APPS,
|
||||
CONF_EXCLUDE_UNNAMED_APPS,
|
||||
CONF_GET_SOURCES,
|
||||
CONF_SCREENCAP,
|
||||
CONF_SCREENCAP_INTERVAL,
|
||||
CONF_TURN_OFF_COMMAND,
|
||||
CONF_TURN_ON_COMMAND,
|
||||
DEFAULT_EXCLUDE_UNNAMED_APPS,
|
||||
DEFAULT_GET_SOURCES,
|
||||
DEFAULT_SCREENCAP,
|
||||
DEFAULT_SCREENCAP_INTERVAL,
|
||||
DEVICE_ANDROIDTV,
|
||||
SIGNAL_CONFIG_ENTITY,
|
||||
)
|
||||
@@ -48,8 +47,6 @@ ATTR_DEVICE_PATH = "device_path"
|
||||
ATTR_HDMI_INPUT = "hdmi_input"
|
||||
ATTR_LOCAL_PATH = "local_path"
|
||||
|
||||
MIN_TIME_BETWEEN_SCREENCAPS = timedelta(seconds=60)
|
||||
|
||||
SERVICE_ADB_COMMAND = "adb_command"
|
||||
SERVICE_DOWNLOAD = "download"
|
||||
SERVICE_LEARN_SENDEVENT = "learn_sendevent"
|
||||
@@ -125,7 +122,8 @@ class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
|
||||
self._app_name_to_id: dict[str, str] = {}
|
||||
self._get_sources = DEFAULT_GET_SOURCES
|
||||
self._exclude_unnamed_apps = DEFAULT_EXCLUDE_UNNAMED_APPS
|
||||
self._screencap = DEFAULT_SCREENCAP
|
||||
self._screencap_delta: timedelta | None = None
|
||||
self._last_screencap: datetime | None = None
|
||||
self.turn_on_command: str | None = None
|
||||
self.turn_off_command: str | None = None
|
||||
|
||||
@@ -159,7 +157,13 @@ class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
|
||||
self._exclude_unnamed_apps = options.get(
|
||||
CONF_EXCLUDE_UNNAMED_APPS, DEFAULT_EXCLUDE_UNNAMED_APPS
|
||||
)
|
||||
self._screencap = options.get(CONF_SCREENCAP, DEFAULT_SCREENCAP)
|
||||
screencap_interval: int = options.get(
|
||||
CONF_SCREENCAP_INTERVAL, DEFAULT_SCREENCAP_INTERVAL
|
||||
)
|
||||
if screencap_interval > 0:
|
||||
self._screencap_delta = timedelta(minutes=screencap_interval)
|
||||
else:
|
||||
self._screencap_delta = None
|
||||
self.turn_off_command = options.get(CONF_TURN_OFF_COMMAND)
|
||||
self.turn_on_command = options.get(CONF_TURN_ON_COMMAND)
|
||||
|
||||
@@ -183,7 +187,7 @@ class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
|
||||
async def _async_get_screencap(self, prev_app_id: str | None = None) -> None:
|
||||
"""Take a screen capture from the device when enabled."""
|
||||
if (
|
||||
not self._screencap
|
||||
not self._screencap_delta
|
||||
or self.state in {MediaPlayerState.OFF, None}
|
||||
or not self.available
|
||||
):
|
||||
@@ -193,11 +197,18 @@ class ADBDevice(AndroidTVEntity, MediaPlayerEntity):
|
||||
force: bool = prev_app_id is not None
|
||||
if force:
|
||||
force = prev_app_id != self._attr_app_id
|
||||
await self._adb_get_screencap(no_throttle=force)
|
||||
await self._adb_get_screencap(force)
|
||||
|
||||
@Throttle(MIN_TIME_BETWEEN_SCREENCAPS)
|
||||
async def _adb_get_screencap(self, **kwargs: Any) -> None:
|
||||
"""Take a screen capture from the device every 60 seconds."""
|
||||
async def _adb_get_screencap(self, force: bool = False) -> None:
|
||||
"""Take a screen capture from the device every configured minutes."""
|
||||
time_elapsed = self._screencap_delta is not None and (
|
||||
self._last_screencap is None
|
||||
or (utcnow() - self._last_screencap) >= self._screencap_delta
|
||||
)
|
||||
if not (force or time_elapsed):
|
||||
return
|
||||
|
||||
self._last_screencap = utcnow()
|
||||
if media_data := await self._adb_screencap():
|
||||
self._media_image = media_data, "image/png"
|
||||
self._attr_media_image_hash = hashlib.sha256(media_data).hexdigest()[:16]
|
||||
|
@@ -31,7 +31,7 @@
|
||||
"apps": "Configure applications list",
|
||||
"get_sources": "Retrieve the running apps as the list of sources",
|
||||
"exclude_unnamed_apps": "Exclude apps with unknown name from the sources list",
|
||||
"screencap": "Use screen capture for album art",
|
||||
"screencap_interval": "Interval in minutes between screen capture for album art (set 0 to disable)",
|
||||
"state_detection_rules": "Configure state detection rules",
|
||||
"turn_off_command": "ADB shell turn off command (leave empty for default)",
|
||||
"turn_on_command": "ADB shell turn on command (leave empty for default)"
|
||||
|
@@ -20,7 +20,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithConfigEntry,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
|
||||
from homeassistant.core import callback
|
||||
@@ -221,13 +221,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return AndroidTVRemoteOptionsFlowHandler(config_entry)
|
||||
|
||||
|
||||
class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class AndroidTVRemoteOptionsFlowHandler(OptionsFlow):
|
||||
"""Android TV Remote options flow."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
super().__init__(config_entry)
|
||||
self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {})
|
||||
self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {}))
|
||||
self._conf_app_id: str | None = None
|
||||
|
||||
@callback
|
||||
|
@@ -121,7 +121,6 @@ class AnthropicOptionsFlow(OptionsFlow):

    def __init__(self, config_entry: ConfigEntry) -> None:
        """Initialize options flow."""
        self.config_entry = config_entry
        self.last_rendered_recommended = config_entry.options.get(
            CONF_RECOMMENDED, False
        )
@@ -15,12 +15,14 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type AranetConfigEntry = ConfigEntry[
|
||||
PassiveBluetoothProcessorCoordinator[Aranet4Advertisement]
|
||||
]
|
||||
|
||||
|
||||
def _service_info_to_adv(
|
||||
service_info: BluetoothServiceInfoBleak,
|
||||
@@ -28,30 +30,25 @@ def _service_info_to_adv(
|
||||
return Aranet4Advertisement(service_info.device, service_info.advertisement)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AranetConfigEntry) -> bool:
|
||||
"""Set up Aranet from a config entry."""
|
||||
|
||||
address = entry.unique_id
|
||||
assert address is not None
|
||||
coordinator = hass.data.setdefault(DOMAIN, {})[entry.entry_id] = (
|
||||
PassiveBluetoothProcessorCoordinator(
|
||||
hass,
|
||||
_LOGGER,
|
||||
address=address,
|
||||
mode=BluetoothScanningMode.PASSIVE,
|
||||
update_method=_service_info_to_adv,
|
||||
)
|
||||
coordinator = PassiveBluetoothProcessorCoordinator(
|
||||
hass,
|
||||
_LOGGER,
|
||||
address=address,
|
||||
mode=BluetoothScanningMode.PASSIVE,
|
||||
update_method=_service_info_to_adv,
|
||||
)
|
||||
entry.runtime_data = coordinator
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(
|
||||
coordinator.async_start()
|
||||
) # only start after all platforms have had a chance to subscribe
|
||||
# only start after all platforms have had a chance to subscribe
|
||||
entry.async_on_unload(coordinator.async_start())
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: AranetConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
|
||||
return unload_ok
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
@@ -8,12 +8,10 @@ from typing import Any
|
||||
from aranet4.client import Aranet4Advertisement
|
||||
from bleak.backends.device import BLEDevice
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.components.bluetooth.passive_update_processor import (
|
||||
PassiveBluetoothDataProcessor,
|
||||
PassiveBluetoothDataUpdate,
|
||||
PassiveBluetoothEntityKey,
|
||||
PassiveBluetoothProcessorCoordinator,
|
||||
PassiveBluetoothProcessorEntity,
|
||||
)
|
||||
from homeassistant.components.sensor import (
|
||||
@@ -38,7 +36,8 @@ from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity import EntityDescription
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from .const import ARANET_MANUFACTURER_NAME, DOMAIN
|
||||
from . import AranetConfigEntry
|
||||
from .const import ARANET_MANUFACTURER_NAME
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -174,20 +173,17 @@ def sensor_update_to_bluetooth_data_update(
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: config_entries.ConfigEntry,
|
||||
entry: AranetConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Aranet sensors."""
|
||||
coordinator: PassiveBluetoothProcessorCoordinator[Aranet4Advertisement] = hass.data[
|
||||
DOMAIN
|
||||
][entry.entry_id]
|
||||
processor = PassiveBluetoothDataProcessor(sensor_update_to_bluetooth_data_update)
|
||||
entry.async_on_unload(
|
||||
processor.async_add_entities_listener(
|
||||
Aranet4BluetoothSensorEntity, async_add_entities
|
||||
)
|
||||
)
|
||||
entry.async_on_unload(coordinator.async_register_processor(processor))
|
||||
entry.async_on_unload(entry.runtime_data.async_register_processor(processor))
|
||||
|
||||
|
||||
class Aranet4BluetoothSensorEntity(
|
||||
|
@@ -22,8 +22,8 @@ class EnhancedAudioChunk:
|
||||
timestamp_ms: int
|
||||
"""Timestamp relative to start of audio stream (milliseconds)"""
|
||||
|
||||
is_speech: bool | None
|
||||
"""True if audio chunk likely contains speech, False if not, None if unknown"""
|
||||
speech_probability: float | None
|
||||
"""Probability that audio chunk contains speech (0-1), None if unknown"""
|
||||
|
||||
|
||||
class AudioEnhancer(ABC):
|
||||
@@ -70,27 +70,27 @@ class MicroVadSpeexEnhancer(AudioEnhancer):
|
||||
)
|
||||
|
||||
self.vad: MicroVad | None = None
|
||||
self.threshold = 0.5
|
||||
|
||||
if self.is_vad_enabled:
|
||||
self.vad = MicroVad()
|
||||
_LOGGER.debug("Initialized microVAD with threshold=%s", self.threshold)
|
||||
_LOGGER.debug("Initialized microVAD")
|
||||
|
||||
def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
|
||||
"""Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
|
||||
is_speech: bool | None = None
|
||||
speech_probability: float | None = None
|
||||
|
||||
assert len(audio) == BYTES_PER_CHUNK
|
||||
|
||||
if self.vad is not None:
|
||||
# Run VAD
|
||||
speech_prob = self.vad.Process10ms(audio)
|
||||
is_speech = speech_prob > self.threshold
|
||||
speech_probability = self.vad.Process10ms(audio)
|
||||
|
||||
if self.audio_processor is not None:
|
||||
# Run noise suppression and auto gain
|
||||
audio = self.audio_processor.Process10ms(audio).audio
|
||||
|
||||
return EnhancedAudioChunk(
|
||||
audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech
|
||||
audio=audio,
|
||||
timestamp_ms=timestamp_ms,
|
||||
speech_probability=speech_probability,
|
||||
)
|
||||
|
@@ -780,7 +780,9 @@ class PipelineRun:
|
||||
# speaking the voice command.
|
||||
audio_chunks_for_stt.extend(
|
||||
EnhancedAudioChunk(
|
||||
audio=chunk_ts[0], timestamp_ms=chunk_ts[1], is_speech=False
|
||||
audio=chunk_ts[0],
|
||||
timestamp_ms=chunk_ts[1],
|
||||
speech_probability=None,
|
||||
)
|
||||
for chunk_ts in result.queued_audio
|
||||
)
|
||||
@@ -827,7 +829,7 @@ class PipelineRun:
|
||||
|
||||
if wake_word_vad is not None:
|
||||
chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate
|
||||
if not wake_word_vad.process(chunk_seconds, chunk.is_speech):
|
||||
if not wake_word_vad.process(chunk_seconds, chunk.speech_probability):
|
||||
raise WakeWordTimeoutError(
|
||||
code="wake-word-timeout", message="Wake word was not detected"
|
||||
)
|
||||
@@ -955,7 +957,7 @@ class PipelineRun:
|
||||
|
||||
if stt_vad is not None:
|
||||
chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate
|
||||
if not stt_vad.process(chunk_seconds, chunk.is_speech):
|
||||
if not stt_vad.process(chunk_seconds, chunk.speech_probability):
|
||||
# Silence detected at the end of voice command
|
||||
self.process_event(
|
||||
PipelineEvent(
|
||||
@@ -1221,7 +1223,7 @@ class PipelineRun:
|
||||
yield EnhancedAudioChunk(
|
||||
audio=sub_chunk,
|
||||
timestamp_ms=timestamp_ms,
|
||||
is_speech=None, # no VAD
|
||||
speech_probability=None, # no VAD
|
||||
)
|
||||
timestamp_ms += MS_PER_CHUNK
|
||||
|
||||
|
@@ -75,7 +75,7 @@ class AudioBuffer:
|
||||
class VoiceCommandSegmenter:
|
||||
"""Segments an audio stream into voice commands."""
|
||||
|
||||
speech_seconds: float = 0.3
|
||||
speech_seconds: float = 0.1
|
||||
"""Seconds of speech before voice command has started."""
|
||||
|
||||
command_seconds: float = 1.0
|
||||
@@ -96,6 +96,12 @@ class VoiceCommandSegmenter:
|
||||
timed_out: bool = False
|
||||
"""True a timeout occurred during voice command."""
|
||||
|
||||
before_command_speech_threshold: float = 0.2
|
||||
"""Probability threshold for speech before voice command."""
|
||||
|
||||
in_command_speech_threshold: float = 0.5
|
||||
"""Probability threshold for speech during voice command."""
|
||||
|
||||
_speech_seconds_left: float = 0.0
|
||||
"""Seconds left before considering voice command as started."""
|
||||
|
||||
@@ -124,7 +130,7 @@ class VoiceCommandSegmenter:
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
self.in_command = False
|
||||
|
||||
def process(self, chunk_seconds: float, is_speech: bool | None) -> bool:
|
||||
def process(self, chunk_seconds: float, speech_probability: float | None) -> bool:
|
||||
"""Process samples using external VAD.
|
||||
|
||||
Returns False when command is done.
|
||||
@@ -142,7 +148,12 @@ class VoiceCommandSegmenter:
|
||||
self.timed_out = True
|
||||
return False
|
||||
|
||||
if speech_probability is None:
|
||||
speech_probability = 0.0
|
||||
|
||||
if not self.in_command:
|
||||
# Before command
|
||||
is_speech = speech_probability > self.before_command_speech_threshold
|
||||
if is_speech:
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
self._speech_seconds_left -= chunk_seconds
|
||||
@@ -160,24 +171,29 @@ class VoiceCommandSegmenter:
|
||||
if self._reset_seconds_left <= 0:
|
||||
self._speech_seconds_left = self.speech_seconds
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
elif not is_speech:
|
||||
# Silence in command
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
self._silence_seconds_left -= chunk_seconds
|
||||
self._command_seconds_left -= chunk_seconds
|
||||
if (self._silence_seconds_left <= 0) and (self._command_seconds_left <= 0):
|
||||
# Command finished successfully
|
||||
self.reset()
|
||||
_LOGGER.debug("Voice command finished")
|
||||
return False
|
||||
else:
|
||||
# Speech in command.
|
||||
# Reset silence counter if enough speech.
|
||||
self._reset_seconds_left -= chunk_seconds
|
||||
self._command_seconds_left -= chunk_seconds
|
||||
if self._reset_seconds_left <= 0:
|
||||
self._silence_seconds_left = self.silence_seconds
|
||||
# In command
|
||||
is_speech = speech_probability > self.in_command_speech_threshold
|
||||
if not is_speech:
|
||||
# Silence in command
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
self._silence_seconds_left -= chunk_seconds
|
||||
self._command_seconds_left -= chunk_seconds
|
||||
if (self._silence_seconds_left <= 0) and (
|
||||
self._command_seconds_left <= 0
|
||||
):
|
||||
# Command finished successfully
|
||||
self.reset()
|
||||
_LOGGER.debug("Voice command finished")
|
||||
return False
|
||||
else:
|
||||
# Speech in command.
|
||||
# Reset silence counter if enough speech.
|
||||
self._reset_seconds_left -= chunk_seconds
|
||||
self._command_seconds_left -= chunk_seconds
|
||||
if self._reset_seconds_left <= 0:
|
||||
self._silence_seconds_left = self.silence_seconds
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
|
||||
return True
|
||||
|
||||
@@ -226,6 +242,9 @@ class VoiceActivityTimeout:
|
||||
reset_seconds: float = 0.5
|
||||
"""Seconds of speech before resetting timeout."""
|
||||
|
||||
speech_threshold: float = 0.5
|
||||
"""Threshold for speech."""
|
||||
|
||||
_silence_seconds_left: float = 0.0
|
||||
"""Seconds left before considering voice command as stopped."""
|
||||
|
||||
@@ -241,12 +260,15 @@ class VoiceActivityTimeout:
|
||||
self._silence_seconds_left = self.silence_seconds
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
|
||||
def process(self, chunk_seconds: float, is_speech: bool | None) -> bool:
|
||||
def process(self, chunk_seconds: float, speech_probability: float | None) -> bool:
|
||||
"""Process samples using external VAD.
|
||||
|
||||
Returns False when timeout is reached.
|
||||
"""
|
||||
if is_speech:
|
||||
if speech_probability is None:
|
||||
speech_probability = 0.0
|
||||
|
||||
if speech_probability > self.speech_threshold:
|
||||
# Speech
|
||||
self._reset_seconds_left -= chunk_seconds
|
||||
if self._reset_seconds_left <= 0:
|
||||
|
@@ -5,5 +5,5 @@
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/autarco",
  "iot_class": "cloud_polling",
  "requirements": ["autarco==3.0.0"]
  "requirements": ["autarco==3.1.0"]
}
|
@@ -18,7 +18,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithConfigEntry,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
@@ -59,9 +59,11 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> AxisOptionsFlowHandler:
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> AxisOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return AxisOptionsFlowHandler(config_entry)
|
||||
return AxisOptionsFlowHandler()
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the Axis config flow."""
|
||||
@@ -264,7 +266,7 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
|
||||
return await self.async_step_user()
|
||||
|
||||
|
||||
class AxisOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class AxisOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle Axis device options."""
|
||||
|
||||
config_entry: AxisConfigEntry
|
||||
@@ -282,8 +284,7 @@ class AxisOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the Axis device stream options."""
|
||||
if user_input is not None:
|
||||
self.options.update(user_input)
|
||||
return self.async_create_entry(title="", data=self.options)
|
||||
return self.async_create_entry(data=self.config_entry.options | user_input)
|
||||
|
||||
schema = {}
|
||||
|
||||
|
@@ -124,7 +124,9 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id=STEP_CONN_STRING,
|
||||
data_schema=CONN_STRING_SCHEMA,
|
||||
errors=errors,
|
||||
description_placeholders=self._data[CONF_EVENT_HUB_INSTANCE_NAME],
|
||||
description_placeholders={
|
||||
"event_hub_instance_name": self._data[CONF_EVENT_HUB_INSTANCE_NAME]
|
||||
},
|
||||
last_step=True,
|
||||
)
|
||||
|
||||
@@ -144,7 +146,9 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id=STEP_SAS,
|
||||
data_schema=SAS_SCHEMA,
|
||||
errors=errors,
|
||||
description_placeholders=self._data[CONF_EVENT_HUB_INSTANCE_NAME],
|
||||
description_placeholders={
|
||||
"event_hub_instance_name": self._data[CONF_EVENT_HUB_INSTANCE_NAME]
|
||||
},
|
||||
last_step=True,
|
||||
)
|
||||
|
||||
|
@@ -1,8 +1,8 @@
"""The Backup integration."""

from homeassistant.components.hassio import is_hassio
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.hassio import is_hassio
from homeassistant.helpers.typing import ConfigType

from .const import DATA_MANAGER, DOMAIN, LOGGER
@@ -17,6 +17,7 @@ LOGGER = getLogger(__package__)
EXCLUDE_FROM_BACKUP = [
    "__pycache__/*",
    ".DS_Store",
    ".HA_RESTORE",
    "*.db-shm",
    "*.log.*",
    "*.log",
@@ -16,6 +16,7 @@ from typing import Any, Protocol, cast
|
||||
|
||||
from securetar import SecureTarFile, atomic_contents_add
|
||||
|
||||
from homeassistant.backup_restore import RESTORE_BACKUP_FILE
|
||||
from homeassistant.const import __version__ as HAVERSION
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -123,6 +124,10 @@ class BaseBackupManager(abc.ABC):
|
||||
LOGGER.debug("Loaded %s platforms", len(self.platforms))
|
||||
self.loaded_platforms = True
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_restore_backup(self, slug: str, **kwargs: Any) -> None:
|
||||
"""Restore a backup."""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_create_backup(self, **kwargs: Any) -> Backup:
|
||||
"""Generate a backup."""
|
||||
@@ -291,6 +296,25 @@ class BackupManager(BaseBackupManager):
|
||||
|
||||
return tar_file_path.stat().st_size
|
||||
|
||||
async def async_restore_backup(self, slug: str, **kwargs: Any) -> None:
|
||||
"""Restore a backup.
|
||||
|
||||
This will write the restore information to .HA_RESTORE which
|
||||
will be handled during startup by the restore_backup module.
|
||||
"""
|
||||
if (backup := await self.async_get_backup(slug=slug)) is None:
|
||||
raise HomeAssistantError(f"Backup {slug} not found")
|
||||
|
||||
def _write_restore_file() -> None:
|
||||
"""Write the restore file."""
|
||||
Path(self.hass.config.path(RESTORE_BACKUP_FILE)).write_text(
|
||||
json.dumps({"path": backup.path.as_posix()}),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
await self.hass.async_add_executor_job(_write_restore_file)
|
||||
await self.hass.services.async_call("homeassistant", "restart", {})
|
||||
|
||||
|
||||
def _generate_slug(date: str, name: str) -> str:
|
||||
"""Generate a backup slug."""
|
||||
|
@@ -22,6 +22,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) ->
|
||||
websocket_api.async_register_command(hass, handle_info)
|
||||
websocket_api.async_register_command(hass, handle_create)
|
||||
websocket_api.async_register_command(hass, handle_remove)
|
||||
websocket_api.async_register_command(hass, handle_restore)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@@ -85,6 +86,24 @@ async def handle_remove(
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "backup/restore",
|
||||
vol.Required("slug"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def handle_restore(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Restore a backup."""
|
||||
await hass.data[DATA_MANAGER].async_restore_backup(msg["slug"])
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command({vol.Required("type"): "backup/generate"})
|
||||
@websocket_api.async_response
|
||||
|
@@ -7,20 +7,19 @@ from typing import Final
|
||||
|
||||
from mozart_api.models import Source, SourceArray, SourceTypeEnum
|
||||
|
||||
from homeassistant.components.media_player import MediaPlayerState, MediaType
|
||||
from homeassistant.components.media_player import (
|
||||
MediaPlayerState,
|
||||
MediaType,
|
||||
RepeatMode,
|
||||
)
|
||||
|
||||
|
||||
class BangOlufsenSource:
|
||||
"""Class used for associating device source ids with friendly names. May not include all sources."""
|
||||
|
||||
URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer")
|
||||
BLUETOOTH: Final[Source] = Source(name="Bluetooth", id="bluetooth")
|
||||
CHROMECAST: Final[Source] = Source(name="Chromecast built-in", id="chromeCast")
|
||||
LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn")
|
||||
SPDIF: Final[Source] = Source(name="Optical", id="spdif")
|
||||
NET_RADIO: Final[Source] = Source(name="B&O Radio", id="netRadio")
|
||||
DEEZER: Final[Source] = Source(name="Deezer", id="deezer")
|
||||
TIDAL: Final[Source] = Source(name="Tidal", id="tidal")
|
||||
URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer")
|
||||
|
||||
|
||||
BANG_OLUFSEN_STATES: dict[str, MediaPlayerState] = {
|
||||
@@ -36,6 +35,17 @@ BANG_OLUFSEN_STATES: dict[str, MediaPlayerState] = {
|
||||
"unknown": MediaPlayerState.IDLE,
|
||||
}
|
||||
|
||||
# Dict used for translating Home Assistant settings to device repeat settings.
|
||||
BANG_OLUFSEN_REPEAT_FROM_HA: dict[RepeatMode, str] = {
|
||||
RepeatMode.ALL: "all",
|
||||
RepeatMode.ONE: "track",
|
||||
RepeatMode.OFF: "none",
|
||||
}
|
||||
# Dict used for translating device repeat settings to Home Assistant settings.
|
||||
BANG_OLUFSEN_REPEAT_TO_HA: dict[str, RepeatMode] = {
|
||||
value: key for key, value in BANG_OLUFSEN_REPEAT_FROM_HA.items()
|
||||
}
|
||||
|
||||
|
||||
# Media types for play_media
|
||||
class BangOlufsenMediaType(StrEnum):
|
||||
@@ -123,20 +133,6 @@ VALID_MEDIA_TYPES: Final[tuple] = (
|
||||
MediaType.CHANNEL,
|
||||
)
|
||||
|
||||
# Sources on the device that should not be selectable by the user
|
||||
HIDDEN_SOURCE_IDS: Final[tuple] = (
|
||||
"airPlay",
|
||||
"bluetooth",
|
||||
"chromeCast",
|
||||
"generator",
|
||||
"local",
|
||||
"dlna",
|
||||
"qplay",
|
||||
"wpl",
|
||||
"pl",
|
||||
"beolink",
|
||||
"usbIn",
|
||||
)
|
||||
|
||||
# Fallback sources to use in case of API failure.
|
||||
FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
@@ -144,23 +140,26 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
Source(
|
||||
id="uriStreamer",
|
||||
is_enabled=True,
|
||||
is_playable=False,
|
||||
is_playable=True,
|
||||
name="Audio Streamer",
|
||||
type=SourceTypeEnum(value="uriStreamer"),
|
||||
is_seekable=False,
|
||||
),
|
||||
Source(
|
||||
id="bluetooth",
|
||||
is_enabled=True,
|
||||
is_playable=False,
|
||||
is_playable=True,
|
||||
name="Bluetooth",
|
||||
type=SourceTypeEnum(value="bluetooth"),
|
||||
is_seekable=False,
|
||||
),
|
||||
Source(
|
||||
id="spotify",
|
||||
is_enabled=True,
|
||||
is_playable=False,
|
||||
is_playable=True,
|
||||
name="Spotify Connect",
|
||||
type=SourceTypeEnum(value="spotify"),
|
||||
is_seekable=True,
|
||||
),
|
||||
Source(
|
||||
id="lineIn",
|
||||
@@ -168,6 +167,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
is_playable=True,
|
||||
name="Line-In",
|
||||
type=SourceTypeEnum(value="lineIn"),
|
||||
is_seekable=False,
|
||||
),
|
||||
Source(
|
||||
id="spdif",
|
||||
@@ -175,6 +175,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
is_playable=True,
|
||||
name="Optical",
|
||||
type=SourceTypeEnum(value="spdif"),
|
||||
is_seekable=False,
|
||||
),
|
||||
Source(
|
||||
id="netRadio",
|
||||
@@ -182,6 +183,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
is_playable=True,
|
||||
name="B&O Radio",
|
||||
type=SourceTypeEnum(value="netRadio"),
|
||||
is_seekable=False,
|
||||
),
|
||||
Source(
|
||||
id="deezer",
|
||||
@@ -189,6 +191,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
is_playable=True,
|
||||
name="Deezer",
|
||||
type=SourceTypeEnum(value="deezer"),
|
||||
is_seekable=True,
|
||||
),
|
||||
Source(
|
||||
id="tidalConnect",
|
||||
@@ -196,6 +199,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
is_playable=True,
|
||||
name="Tidal Connect",
|
||||
type=SourceTypeEnum(value="tidalConnect"),
|
||||
is_seekable=True,
|
||||
),
|
||||
]
|
||||
)
|
||||
|
homeassistant/components/bang_olufsen/icons.json (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "services": {
    "beolink_join": { "service": "mdi:location-enter" },
    "beolink_expand": { "service": "mdi:location-enter" },
    "beolink_unexpand": { "service": "mdi:location-exit" },
    "beolink_leave": { "service": "mdi:close-circle-outline" },
    "beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" }
  }
}
@@ -3,12 +3,15 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import contextlib
|
||||
from datetime import timedelta
|
||||
import json
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
from aiohttp import ClientConnectorError
|
||||
from mozart_api import __version__ as MOZART_API_VERSION
|
||||
from mozart_api.exceptions import ApiException
|
||||
from mozart_api.exceptions import ApiException, NotFoundException
|
||||
from mozart_api.models import (
|
||||
Action,
|
||||
Art,
|
||||
@@ -22,6 +25,7 @@ from mozart_api.models import (
|
||||
PlaybackProgress,
|
||||
PlayQueueItem,
|
||||
PlayQueueItemType,
|
||||
PlayQueueSettings,
|
||||
RenderingState,
|
||||
SceneProperties,
|
||||
SoftwareUpdateState,
|
||||
@@ -34,6 +38,7 @@ from mozart_api.models import (
|
||||
VolumeState,
|
||||
)
|
||||
from mozart_api.mozart_client import MozartClient, get_highest_resolution_artwork
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import media_source
|
||||
from homeassistant.components.media_player import (
|
||||
@@ -44,26 +49,35 @@ from homeassistant.components.media_player import (
|
||||
MediaPlayerEntityFeature,
|
||||
MediaPlayerState,
|
||||
MediaType,
|
||||
RepeatMode,
|
||||
async_process_play_media_url,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_MODEL, Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
device_registry as dr,
|
||||
entity_registry as er,
|
||||
)
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.util.dt import utcnow
|
||||
|
||||
from . import BangOlufsenConfigEntry
|
||||
from .const import (
|
||||
BANG_OLUFSEN_REPEAT_FROM_HA,
|
||||
BANG_OLUFSEN_REPEAT_TO_HA,
|
||||
BANG_OLUFSEN_STATES,
|
||||
CONF_BEOLINK_JID,
|
||||
CONNECTION_STATUS,
|
||||
DOMAIN,
|
||||
FALLBACK_SOURCES,
|
||||
HIDDEN_SOURCE_IDS,
|
||||
VALID_MEDIA_TYPES,
|
||||
BangOlufsenMediaType,
|
||||
BangOlufsenSource,
|
||||
@@ -72,6 +86,8 @@ from .const import (
|
||||
from .entity import BangOlufsenEntity
|
||||
from .util import get_serial_number_from_jid
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
BANG_OLUFSEN_FEATURES = (
|
||||
@@ -84,8 +100,9 @@ BANG_OLUFSEN_FEATURES = (
|
||||
| MediaPlayerEntityFeature.PLAY
|
||||
| MediaPlayerEntityFeature.PLAY_MEDIA
|
||||
| MediaPlayerEntityFeature.PREVIOUS_TRACK
|
||||
| MediaPlayerEntityFeature.SEEK
|
||||
| MediaPlayerEntityFeature.REPEAT_SET
|
||||
| MediaPlayerEntityFeature.SELECT_SOURCE
|
||||
| MediaPlayerEntityFeature.SHUFFLE_SET
|
||||
| MediaPlayerEntityFeature.STOP
|
||||
| MediaPlayerEntityFeature.TURN_OFF
|
||||
| MediaPlayerEntityFeature.VOLUME_MUTE
|
||||
@@ -107,6 +124,58 @@ async def async_setup_entry(
|
||||
]
|
||||
)
|
||||
|
||||
# Register actions.
|
||||
platform = async_get_current_platform()
|
||||
|
||||
jid_regex = vol.Match(
|
||||
r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$"
|
||||
)
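For reference, the JID format enforced above can be exercised with a small standalone sketch; the sample value is the placeholder JID used in services.yaml further down, not a real device:

import re

# Same pattern as the vol.Match above.
JID_PATTERN = re.compile(
    r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$"
)

# Placeholder JID taken from the services.yaml examples; not a real device.
assert JID_PATTERN.match("1111.2222222.33333333@products.bang-olufsen.com")
assert JID_PATTERN.match("1111.2222222.33333333@example.com") is None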
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_join",
|
||||
schema={vol.Optional("beolink_jid"): jid_regex},
|
||||
func="async_beolink_join",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_expand",
|
||||
schema={
|
||||
vol.Exclusive("all_discovered", "devices", ""): cv.boolean,
|
||||
vol.Exclusive(
|
||||
"beolink_jids",
|
||||
"devices",
|
||||
"Define either specific Beolink JIDs or all discovered",
|
||||
): vol.All(
|
||||
cv.ensure_list,
|
||||
[jid_regex],
|
||||
),
|
||||
},
|
||||
func="async_beolink_expand",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_unexpand",
|
||||
schema={
|
||||
vol.Required("beolink_jids"): vol.All(
|
||||
cv.ensure_list,
|
||||
[jid_regex],
|
||||
),
|
||||
},
|
||||
func="async_beolink_unexpand",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_leave",
|
||||
schema=None,
|
||||
func="async_beolink_leave",
|
||||
)
|
||||
|
||||
platform.async_register_entity_service(
|
||||
name="beolink_allstandby",
|
||||
schema=None,
|
||||
func="async_beolink_allstandby",
|
||||
)
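Once registered, these appear as bang_olufsen.* actions. A minimal sketch of invoking one from a coroutine, assuming a hypothetical entity id media_player.beosound_balance and reusing the placeholder JID from services.yaml:

# Hypothetical entity_id; the JID is a placeholder value from services.yaml.
await hass.services.async_call(
    "bang_olufsen",
    "beolink_expand",
    {"beolink_jids": ["4444.5555555.66666666@products.bang-olufsen.com"]},
    target={"entity_id": "media_player.beosound_balance"},
    blocking=True,
)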
|
||||
|
||||
|
||||
class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
"""Representation of a media player."""
|
||||
@@ -114,7 +183,6 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
_attr_icon = "mdi:speaker-wireless"
|
||||
_attr_name = None
|
||||
_attr_device_class = MediaPlayerDeviceClass.SPEAKER
|
||||
_attr_supported_features = BANG_OLUFSEN_FEATURES
|
||||
|
||||
def __init__(self, entry: ConfigEntry, client: MozartClient) -> None:
|
||||
"""Initialize the media player."""
|
||||
@@ -131,6 +199,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
serial_number=self._unique_id,
|
||||
)
|
||||
self._attr_unique_id = self._unique_id
|
||||
self._attr_should_poll = True
|
||||
|
||||
# Misc. variables.
|
||||
self._audio_sources: dict[str, str] = {}
|
||||
@@ -147,6 +216,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
# Beolink compatible sources
|
||||
self._beolink_sources: dict[str, bool] = {}
|
||||
self._remote_leader: BeolinkLeader | None = None
|
||||
# Extra state attributes for showing Beolink: peer(s), listener(s), leader and self
|
||||
self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {}
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Turn on the dispatchers."""
|
||||
@@ -156,9 +227,11 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
CONNECTION_STATUS: self._async_update_connection_state,
|
||||
WebsocketNotification.ACTIVE_LISTENING_MODE: self._async_update_sound_modes,
|
||||
WebsocketNotification.BEOLINK: self._async_update_beolink,
|
||||
WebsocketNotification.CONFIGURATION: self._async_update_name_and_beolink,
|
||||
WebsocketNotification.PLAYBACK_ERROR: self._async_update_playback_error,
|
||||
WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata_and_beolink,
|
||||
WebsocketNotification.PLAYBACK_PROGRESS: self._async_update_playback_progress,
|
||||
WebsocketNotification.PLAYBACK_SOURCE: self._async_update_sources,
|
||||
WebsocketNotification.PLAYBACK_STATE: self._async_update_playback_state,
|
||||
WebsocketNotification.REMOTE_MENU_CHANGED: self._async_update_sources,
|
||||
WebsocketNotification.SOURCE_CHANGE: self._async_update_source_change,
|
||||
@@ -220,7 +293,23 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
|
||||
await self._async_update_sound_modes()
|
||||
|
||||
async def _async_update_sources(self) -> None:
|
||||
# Update beolink attributes and device name.
|
||||
await self._async_update_name_and_beolink()
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Update queue settings."""
|
||||
# The WebSocket event listener is the main handler for connection state.
|
||||
# The polling updates therefore do not set the device as available or unavailable
|
||||
with contextlib.suppress(ApiException, ClientConnectorError, TimeoutError):
|
||||
queue_settings = await self._client.get_settings_queue(_request_timeout=5)
|
||||
|
||||
if queue_settings.repeat is not None:
|
||||
self._attr_repeat = BANG_OLUFSEN_REPEAT_TO_HA[queue_settings.repeat]
|
||||
|
||||
if queue_settings.shuffle is not None:
|
||||
self._attr_shuffle = queue_settings.shuffle
|
||||
|
||||
async def _async_update_sources(self, _: Source | None = None) -> None:
|
||||
"""Get sources for the specific product."""
|
||||
|
||||
# Audio sources
|
||||
@@ -247,10 +336,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
self._audio_sources = {
|
||||
source.id: source.name
|
||||
for source in cast(list[Source], sources.items)
|
||||
if source.is_enabled
|
||||
and source.id
|
||||
and source.name
|
||||
and source.id not in HIDDEN_SOURCE_IDS
|
||||
if source.is_enabled and source.id and source.name and source.is_playable
|
||||
}
|
||||
|
||||
# Some sources are not Beolink expandable, meaning that they can't be joined by
|
||||
@@ -352,9 +438,44 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def _async_update_name_and_beolink(self) -> None:
|
||||
"""Update the device friendly name."""
|
||||
beolink_self = await self._client.get_beolink_self()
|
||||
|
||||
# Update device name
|
||||
device_registry = dr.async_get(self.hass)
|
||||
assert self.device_entry is not None
|
||||
|
||||
device_registry.async_update_device(
|
||||
device_id=self.device_entry.id,
|
||||
name=beolink_self.friendly_name,
|
||||
)
|
||||
|
||||
await self._async_update_beolink()
|
||||
|
||||
async def _async_update_beolink(self) -> None:
|
||||
"""Update the current Beolink leader, listeners, peers and self."""
|
||||
|
||||
self._beolink_attributes = {}
|
||||
|
||||
assert self.device_entry is not None
|
||||
assert self.device_entry.name is not None
|
||||
|
||||
# Add Beolink self
|
||||
self._beolink_attributes = {
|
||||
"beolink": {"self": {self.device_entry.name: self._beolink_jid}}
|
||||
}
|
||||
|
||||
# Add Beolink peers
|
||||
peers = await self._client.get_beolink_peers()
|
||||
|
||||
if len(peers) > 0:
|
||||
self._beolink_attributes["beolink"]["peers"] = {}
|
||||
for peer in peers:
|
||||
self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = (
|
||||
peer.jid
|
||||
)
|
||||
|
||||
# Add Beolink listeners / leader
|
||||
self._remote_leader = self._playback_metadata.remote_leader
|
||||
|
||||
@@ -374,9 +495,14 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
# Add self
|
||||
group_members.append(self.entity_id)
|
||||
|
||||
self._beolink_attributes["beolink"]["leader"] = {
|
||||
self._remote_leader.friendly_name: self._remote_leader.jid,
|
||||
}
|
||||
|
||||
# If not a listener, check whether the device is a leader.
|
||||
else:
|
||||
beolink_listeners = await self._client.get_beolink_listeners()
|
||||
beolink_listeners_attribute = {}
|
||||
|
||||
# Check if the device is a leader.
|
||||
if len(beolink_listeners) > 0:
|
||||
@@ -397,6 +523,18 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
for beolink_listener in beolink_listeners
|
||||
]
|
||||
)
|
||||
# Update Beolink attributes
|
||||
for beolink_listener in beolink_listeners:
|
||||
for peer in peers:
|
||||
if peer.jid == beolink_listener.jid:
|
||||
# Get the friendly names for the listeners from the peers
|
||||
beolink_listeners_attribute[peer.friendly_name] = (
|
||||
beolink_listener.jid
|
||||
)
|
||||
break
|
||||
self._beolink_attributes["beolink"]["listeners"] = (
|
||||
beolink_listeners_attribute
|
||||
)
|
||||
|
||||
self._attr_group_members = group_members
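For illustration, the resulting "beolink" extra state attribute for a leader with one listener could look like the following sketch (device names are made up; the JIDs reuse the services.yaml placeholders):

# Hypothetical payload of self._beolink_attributes on a leader device.
{
    "beolink": {
        "self": {"Living room": "1111.2222222.33333333@products.bang-olufsen.com"},
        "peers": {"Kitchen": "4444.5555555.66666666@products.bang-olufsen.com"},
        "listeners": {"Kitchen": "4444.5555555.66666666@products.bang-olufsen.com"},
    }
}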
|
||||
|
||||
@@ -464,6 +602,17 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def supported_features(self) -> MediaPlayerEntityFeature:
|
||||
"""Flag media player features that are supported."""
|
||||
features = BANG_OLUFSEN_FEATURES
|
||||
|
||||
# Add seeking if supported by the current source
|
||||
if self._source_change.is_seekable is True:
|
||||
features |= MediaPlayerEntityFeature.SEEK
|
||||
|
||||
return features
|
||||
|
||||
@property
|
||||
def state(self) -> MediaPlayerState:
|
||||
"""Return the current state of the media player."""
|
||||
@@ -539,38 +688,19 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
@property
|
||||
def source(self) -> str | None:
|
||||
"""Return the current audio source."""
|
||||
|
||||
# Try to fix some of the source_change chromecast weirdness.
|
||||
if hasattr(self._playback_metadata, "title"):
|
||||
# source_change is chromecast but line in is selected.
|
||||
if self._playback_metadata.title == BangOlufsenSource.LINE_IN.name:
|
||||
return BangOlufsenSource.LINE_IN.name
|
||||
|
||||
# source_change is chromecast but bluetooth is selected.
|
||||
if self._playback_metadata.title == BangOlufsenSource.BLUETOOTH.name:
|
||||
return BangOlufsenSource.BLUETOOTH.name
|
||||
|
||||
# source_change is line in, bluetooth or optical, but stale metadata is sent through the WebSocket,
|
||||
# and the source has not changed.
|
||||
if self._source_change.id in (
|
||||
BangOlufsenSource.BLUETOOTH.id,
|
||||
BangOlufsenSource.LINE_IN.id,
|
||||
BangOlufsenSource.SPDIF.id,
|
||||
):
|
||||
return BangOlufsenSource.CHROMECAST.name
|
||||
|
||||
# source_change is chromecast and there is metadata but no artwork. Bluetooth does support metadata but not artwork,
|
||||
# so it is assumed that the source is bluetooth and not chromecast.
|
||||
if (
|
||||
hasattr(self._playback_metadata, "art")
|
||||
and self._playback_metadata.art is not None
|
||||
and len(self._playback_metadata.art) == 0
|
||||
and self._source_change.id == BangOlufsenSource.CHROMECAST.id
|
||||
):
|
||||
return BangOlufsenSource.BLUETOOTH.name
|
||||
|
||||
return self._source_change.name
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self) -> dict[str, Any] | None:
|
||||
"""Return information that is not returned anywhere else."""
|
||||
attributes: dict[str, Any] = {}
|
||||
|
||||
# Add Beolink attributes
|
||||
if self._beolink_attributes:
|
||||
attributes.update(self._beolink_attributes)
|
||||
|
||||
return attributes
|
||||
|
||||
async def async_turn_off(self) -> None:
|
||||
"""Set the device to "networkStandby"."""
|
||||
await self._client.post_standby()
|
||||
@@ -610,17 +740,12 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
|
||||
async def async_media_seek(self, position: float) -> None:
|
||||
"""Seek to position in ms."""
|
||||
if self._source_change.id == BangOlufsenSource.DEEZER.id:
|
||||
await self._client.seek_to_position(position_ms=int(position * 1000))
|
||||
# Try to prevent the playback progress from bouncing in the UI.
|
||||
self._attr_media_position_updated_at = utcnow()
|
||||
self._playback_progress = PlaybackProgress(progress=int(position))
|
||||
await self._client.seek_to_position(position_ms=int(position * 1000))
|
||||
# Try to prevent the playback progress from bouncing in the UI.
|
||||
self._attr_media_position_updated_at = utcnow()
|
||||
self._playback_progress = PlaybackProgress(progress=int(position))
|
||||
|
||||
self.async_write_ha_state()
|
||||
else:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN, translation_key="non_deezer_seeking"
|
||||
)
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_media_previous_track(self) -> None:
|
||||
"""Send the previous track command."""
|
||||
@@ -630,6 +755,20 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
"""Clear the current playback queue."""
|
||||
await self._client.post_clear_queue()
|
||||
|
||||
async def async_set_repeat(self, repeat: RepeatMode) -> None:
|
||||
"""Set playback queues to repeat."""
|
||||
await self._client.set_settings_queue(
|
||||
play_queue_settings=PlayQueueSettings(
|
||||
repeat=BANG_OLUFSEN_REPEAT_FROM_HA[repeat]
|
||||
)
|
||||
)
|
||||
|
||||
async def async_set_shuffle(self, shuffle: bool) -> None:
|
||||
"""Set playback queues to shuffle."""
|
||||
await self._client.set_settings_queue(
|
||||
play_queue_settings=PlayQueueSettings(shuffle=shuffle),
|
||||
)
|
||||
|
||||
async def async_select_source(self, source: str) -> None:
|
||||
"""Select an input source."""
|
||||
if source not in self._sources.values():
|
||||
@@ -833,23 +972,30 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
# Beolink compatible B&O device.
|
||||
# Repeated presses / calls will cycle between compatible playing devices.
|
||||
if len(group_members) == 0:
|
||||
await self._async_beolink_join()
|
||||
await self.async_beolink_join()
|
||||
return
|
||||
|
||||
# Get JID for each group member
|
||||
jids = [self._get_beolink_jid(group_member) for group_member in group_members]
|
||||
await self._async_beolink_expand(jids)
|
||||
await self.async_beolink_expand(jids)
|
||||
|
||||
async def async_unjoin_player(self) -> None:
|
||||
"""Unjoin Beolink session. End session if leader."""
|
||||
await self._async_beolink_leave()
|
||||
await self.async_beolink_leave()
|
||||
|
||||
async def _async_beolink_join(self) -> None:
|
||||
# Custom actions:
|
||||
async def async_beolink_join(self, beolink_jid: str | None = None) -> None:
|
||||
"""Join a Beolink multi-room experience."""
|
||||
await self._client.join_latest_beolink_experience()
|
||||
if beolink_jid is None:
|
||||
await self._client.join_latest_beolink_experience()
|
||||
else:
|
||||
await self._client.join_beolink_peer(jid=beolink_jid)
|
||||
|
||||
async def _async_beolink_expand(self, beolink_jids: list[str]) -> None:
|
||||
async def async_beolink_expand(
|
||||
self, beolink_jids: list[str] | None = None, all_discovered: bool = False
|
||||
) -> None:
|
||||
"""Expand a Beolink multi-room experience with a device or devices."""
|
||||
|
||||
# Ensure that the current source is expandable
|
||||
if not self._beolink_sources[cast(str, self._source_change.id)]:
|
||||
raise ServiceValidationError(
|
||||
@@ -861,10 +1007,37 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
},
|
||||
)
|
||||
|
||||
# Try to expand to all defined devices
|
||||
for beolink_jid in beolink_jids:
|
||||
await self._client.post_beolink_expand(jid=beolink_jid)
|
||||
# Expand to all discovered devices
|
||||
if all_discovered:
|
||||
peers = await self._client.get_beolink_peers()
|
||||
|
||||
async def _async_beolink_leave(self) -> None:
|
||||
for peer in peers:
|
||||
try:
|
||||
await self._client.post_beolink_expand(jid=peer.jid)
|
||||
except NotFoundException:
|
||||
_LOGGER.warning("Unable to expand to %s", peer.jid)
|
||||
|
||||
# Try to expand to all defined devices
|
||||
elif beolink_jids:
|
||||
for beolink_jid in beolink_jids:
|
||||
try:
|
||||
await self._client.post_beolink_expand(jid=beolink_jid)
|
||||
except NotFoundException:
|
||||
_LOGGER.warning(
|
||||
"Unable to expand to %s. Is the device available on the network?",
|
||||
beolink_jid,
|
||||
)
|
||||
|
||||
async def async_beolink_unexpand(self, beolink_jids: list[str]) -> None:
|
||||
"""Unexpand a Beolink multi-room experience with a device or devices."""
|
||||
# Unexpand all defined devices
|
||||
for beolink_jid in beolink_jids:
|
||||
await self._client.post_beolink_unexpand(jid=beolink_jid)
|
||||
|
||||
async def async_beolink_leave(self) -> None:
|
||||
"""Leave the current Beolink experience."""
|
||||
await self._client.post_beolink_leave()
|
||||
|
||||
async def async_beolink_allstandby(self) -> None:
|
||||
"""Set all connected Beolink devices to standby."""
|
||||
await self._client.post_beolink_allstandby()
|
||||
|
homeassistant/components/bang_olufsen/services.yaml (new file, 79 lines added)
@@ -0,0 +1,79 @@
|
||||
beolink_allstandby:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
|
||||
beolink_expand:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
all_discovered:
|
||||
required: false
|
||||
example: false
|
||||
selector:
|
||||
boolean:
|
||||
jid_options:
|
||||
collapsed: false
|
||||
fields:
|
||||
beolink_jids:
|
||||
required: false
|
||||
example: >-
|
||||
[
|
||||
1111.2222222.33333333@products.bang-olufsen.com,
|
||||
4444.5555555.66666666@products.bang-olufsen.com
|
||||
]
|
||||
selector:
|
||||
object:
|
||||
|
||||
beolink_join:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
jid_options:
|
||||
collapsed: false
|
||||
fields:
|
||||
beolink_jid:
|
||||
required: false
|
||||
example: 1111.2222222.33333333@products.bang-olufsen.com
|
||||
selector:
|
||||
text:
|
||||
|
||||
beolink_leave:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
|
||||
beolink_unexpand:
|
||||
target:
|
||||
entity:
|
||||
integration: bang_olufsen
|
||||
domain: media_player
|
||||
device:
|
||||
integration: bang_olufsen
|
||||
fields:
|
||||
jid_options:
|
||||
collapsed: false
|
||||
fields:
|
||||
beolink_jids:
|
||||
required: true
|
||||
example: >-
|
||||
[
|
||||
1111.2222222.33333333@products.bang-olufsen.com,
|
||||
4444.5555555.66666666@products.bang-olufsen.com
|
||||
]
|
||||
selector:
|
||||
object:
|
@@ -1,4 +1,8 @@
|
||||
{
|
||||
"common": {
|
||||
"jid_options_name": "JID options",
|
||||
"jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity."
|
||||
},
|
||||
"config": {
|
||||
"error": {
|
||||
"api_exception": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -25,13 +29,72 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"beolink_allstandby": {
|
||||
"name": "Beolink all standby",
|
||||
"description": "Set all Connected Beolink devices to standby."
|
||||
},
|
||||
"beolink_expand": {
|
||||
"name": "Beolink expand",
|
||||
"description": "Expand current Beolink experience.",
|
||||
"fields": {
|
||||
"all_discovered": {
|
||||
"name": "All discovered",
|
||||
"description": "Expand Beolink experience to all discovered devices."
|
||||
},
|
||||
"beolink_jids": {
|
||||
"name": "Beolink JIDs",
|
||||
"description": "Specify which Beolink JIDs will join current Beolink experience."
|
||||
}
|
||||
},
|
||||
"sections": {
|
||||
"jid_options": {
|
||||
"name": "[%key:component::bang_olufsen::common::jid_options_name%]",
|
||||
"description": "[%key:component::bang_olufsen::common::jid_options_description%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"beolink_join": {
|
||||
"name": "Beolink join",
|
||||
"description": "Join a Beolink experience.",
|
||||
"fields": {
|
||||
"beolink_jid": {
|
||||
"name": "Beolink JID",
|
||||
"description": "Manually specify Beolink JID to join."
|
||||
}
|
||||
},
|
||||
"sections": {
|
||||
"jid_options": {
|
||||
"name": "[%key:component::bang_olufsen::common::jid_options_name%]",
|
||||
"description": "[%key:component::bang_olufsen::common::jid_options_description%]"
|
||||
}
|
||||
}
|
||||
},
|
||||
"beolink_leave": {
|
||||
"name": "Beolink leave",
|
||||
"description": "Leave a Beolink experience."
|
||||
},
|
||||
"beolink_unexpand": {
|
||||
"name": "Beolink unexpand",
|
||||
"description": "Unexpand from current Beolink experience.",
|
||||
"fields": {
|
||||
"beolink_jids": {
|
||||
"name": "Beolink JIDs",
|
||||
"description": "Specify which Beolink JIDs will leave from current Beolink experience."
|
||||
}
|
||||
},
|
||||
"sections": {
|
||||
"jid_options": {
|
||||
"name": "[%key:component::bang_olufsen::common::jid_options_name%]",
|
||||
"description": "[%key:component::bang_olufsen::common::jid_options_description%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"m3u_invalid_format": {
|
||||
"message": "Media sources with the .m3u extension are not supported."
|
||||
},
|
||||
"non_deezer_seeking": {
|
||||
"message": "Seeking is currently only supported when using Deezer"
|
||||
},
|
||||
"invalid_source": {
|
||||
"message": "Invalid source: {invalid_source}. Valid sources are: {valid_sources}"
|
||||
},
|
||||
|
@@ -63,6 +63,9 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
self._client.get_playback_progress_notifications(
|
||||
self.on_playback_progress_notification
|
||||
)
|
||||
self._client.get_playback_source_notifications(
|
||||
self.on_playback_source_notification
|
||||
)
|
||||
self._client.get_playback_state_notifications(
|
||||
self.on_playback_state_notification
|
||||
)
|
||||
@@ -117,6 +120,11 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.BEOLINK}",
|
||||
)
|
||||
elif notification_type is WebsocketNotification.CONFIGURATION:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
|
||||
)
|
||||
elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED:
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
@@ -157,6 +165,14 @@ class BangOlufsenWebsocket(BangOlufsenBase):
|
||||
notification,
|
||||
)
|
||||
|
||||
def on_playback_source_notification(self, notification: Source) -> None:
|
||||
"""Send playback_source dispatch."""
|
||||
async_dispatcher_send(
|
||||
self.hass,
|
||||
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
|
||||
notification,
|
||||
)
|
||||
|
||||
def on_source_change_notification(self, notification: Source) -> None:
|
||||
"""Send source_change dispatch."""
|
||||
async_dispatcher_send(
|
||||
|
@@ -10,7 +10,6 @@ from blinkpy.blinkpy import Blink
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import persistent_notification
|
||||
from homeassistant.config_entries import SOURCE_REAUTH
|
||||
from homeassistant.const import (
|
||||
CONF_FILE_PATH,
|
||||
CONF_FILENAME,
|
||||
@@ -41,13 +40,11 @@ SERVICE_SAVE_RECENT_CLIPS_SCHEMA = vol.Schema(
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
|
||||
async def _reauth_flow_wrapper(hass: HomeAssistant, data: dict[str, Any]) -> None:
|
||||
async def _reauth_flow_wrapper(
|
||||
hass: HomeAssistant, entry: BlinkConfigEntry, data: dict[str, Any]
|
||||
) -> None:
|
||||
"""Reauth flow wrapper."""
|
||||
hass.add_job(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_REAUTH}, data=data
|
||||
)
|
||||
)
|
||||
entry.async_start_reauth(hass, data=data)
|
||||
persistent_notification.async_create(
|
||||
hass,
|
||||
(
|
||||
@@ -64,10 +61,10 @@ async def async_migrate_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> b
|
||||
data = {**entry.data}
|
||||
if entry.version == 1:
|
||||
data.pop("login_response", None)
|
||||
await _reauth_flow_wrapper(hass, data)
|
||||
await _reauth_flow_wrapper(hass, entry, data)
|
||||
return False
|
||||
if entry.version == 2:
|
||||
await _reauth_flow_wrapper(hass, data)
|
||||
await _reauth_flow_wrapper(hass, entry, data)
|
||||
return False
|
||||
return True
|
||||
|
||||
|
@@ -10,7 +10,11 @@ from homeassistant.components.sensor import (
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import EntityCategory, UnitOfTemperature
|
||||
from homeassistant.const import (
|
||||
SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
EntityCategory,
|
||||
UnitOfTemperature,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
@@ -32,6 +36,8 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
|
||||
SensorEntityDescription(
|
||||
key=TYPE_WIFI_STRENGTH,
|
||||
translation_key="wifi_strength",
|
||||
native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
|
||||
device_class=SensorDeviceClass.SIGNAL_STRENGTH,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
|
@@ -364,12 +364,13 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
if self.is_grouped and not self.is_master:
|
||||
return MediaPlayerState.IDLE
|
||||
|
||||
status = self._status.state
|
||||
if status in ("pause", "stop"):
|
||||
return MediaPlayerState.PAUSED
|
||||
if status in ("stream", "play"):
|
||||
return MediaPlayerState.PLAYING
|
||||
return MediaPlayerState.IDLE
|
||||
match self._status.state:
|
||||
case "pause":
|
||||
return MediaPlayerState.PAUSED
|
||||
case "stream" | "play":
|
||||
return MediaPlayerState.PLAYING
|
||||
case _:
|
||||
return MediaPlayerState.IDLE
|
||||
|
||||
@property
|
||||
def media_title(self) -> str | None:
|
||||
@@ -769,7 +770,7 @@ class BluesoundPlayer(MediaPlayerEntity):
|
||||
|
||||
async def async_set_volume_level(self, volume: float) -> None:
|
||||
"""Send volume_up command to media player."""
|
||||
volume = int(volume * 100)
|
||||
volume = int(round(volume * 100))
|
||||
volume = min(100, volume)
|
||||
volume = max(0, volume)
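The three statements above amount to a single clamp; an equivalent one-liner, shown only for clarity:

volume = max(0, min(100, int(round(volume * 100))))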
|
||||
|
||||
|
@@ -7,7 +7,11 @@ from typing import Any
|
||||
|
||||
from bimmer_connected.api.authentication import MyBMWAuthentication
|
||||
from bimmer_connected.api.regions import get_region_from_name
|
||||
from bimmer_connected.models import MyBMWAPIError, MyBMWAuthError
|
||||
from bimmer_connected.models import (
|
||||
MyBMWAPIError,
|
||||
MyBMWAuthError,
|
||||
MyBMWCaptchaMissingError,
|
||||
)
|
||||
from httpx import RequestError
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -17,7 +21,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithConfigEntry,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_SOURCE, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -54,6 +58,8 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str,
|
||||
|
||||
try:
|
||||
await auth.login()
|
||||
except MyBMWCaptchaMissingError as ex:
|
||||
raise MissingCaptcha from ex
|
||||
except MyBMWAuthError as ex:
|
||||
raise InvalidAuth from ex
|
||||
except (MyBMWAPIError, RequestError) as ex:
|
||||
@@ -98,6 +104,8 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
CONF_REFRESH_TOKEN: info.get(CONF_REFRESH_TOKEN),
|
||||
CONF_GCID: info.get(CONF_GCID),
|
||||
}
|
||||
except MissingCaptcha:
|
||||
errors["base"] = "missing_captcha"
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
@@ -145,10 +153,10 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> BMWOptionsFlow:
|
||||
"""Return a MyBMW option flow."""
|
||||
return BMWOptionsFlow(config_entry)
|
||||
return BMWOptionsFlow()
|
||||
|
||||
|
||||
class BMWOptionsFlow(OptionsFlowWithConfigEntry):
|
||||
class BMWOptionsFlow(OptionsFlow):
|
||||
"""Handle a option flow for MyBMW."""
|
||||
|
||||
async def async_step_init(
|
||||
@@ -192,3 +200,7 @@ class CannotConnect(HomeAssistantError):
|
||||
|
||||
class InvalidAuth(HomeAssistantError):
|
||||
"""Error to indicate there is invalid auth."""
|
||||
|
||||
|
||||
class MissingCaptcha(HomeAssistantError):
|
||||
"""Error to indicate the captcha token is missing."""
|
||||
|
@@ -7,7 +7,12 @@ import logging
|
||||
|
||||
from bimmer_connected.account import MyBMWAccount
|
||||
from bimmer_connected.api.regions import get_region_from_name
|
||||
from bimmer_connected.models import GPSPosition, MyBMWAPIError, MyBMWAuthError
|
||||
from bimmer_connected.models import (
|
||||
GPSPosition,
|
||||
MyBMWAPIError,
|
||||
MyBMWAuthError,
|
||||
MyBMWCaptchaMissingError,
|
||||
)
|
||||
from httpx import RequestError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
@@ -61,6 +66,12 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
|
||||
try:
|
||||
await self.account.get_vehicles()
|
||||
except MyBMWCaptchaMissingError as err:
|
||||
# If a captcha is required (user/password login flow), always trigger the reauth flow
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="missing_captcha",
|
||||
) from err
|
||||
except MyBMWAuthError as err:
|
||||
# Allow one retry interval before raising AuthFailed to avoid flaky API issues
|
||||
if self.last_update_success:
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["bimmer_connected"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["bimmer-connected[china]==0.16.3"]
|
||||
"requirements": ["bimmer-connected[china]==0.16.4"]
|
||||
}
|
||||
|
@@ -11,7 +11,8 @@
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"missing_captcha": "Captcha validation missing"
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
|
||||
@@ -200,6 +201,9 @@
|
||||
"exceptions": {
|
||||
"invalid_poi": {
|
||||
"message": "Invalid data for point of interest: {poi_exception}"
|
||||
},
|
||||
"missing_captcha": {
|
||||
"message": "Login requires captcha validation"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -39,16 +39,21 @@ HOST_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
def write_tls_asset(hass: HomeAssistant, filename: str, asset: bytes) -> None:
|
||||
def write_tls_asset(
|
||||
hass: HomeAssistant, folder: str, filename: str, asset: bytes
|
||||
) -> None:
|
||||
"""Write the tls assets to disk."""
|
||||
makedirs(hass.config.path(DOMAIN), exist_ok=True)
|
||||
with open(hass.config.path(DOMAIN, filename), "w", encoding="utf8") as file_handle:
|
||||
makedirs(hass.config.path(DOMAIN, folder), exist_ok=True)
|
||||
with open(
|
||||
hass.config.path(DOMAIN, folder, filename), "w", encoding="utf8"
|
||||
) as file_handle:
|
||||
file_handle.write(asset.decode("utf-8"))
|
||||
|
||||
|
||||
def create_credentials_and_validate(
|
||||
hass: HomeAssistant,
|
||||
host: str,
|
||||
unique_id: str,
|
||||
user_input: dict[str, Any],
|
||||
zeroconf_instance: zeroconf.HaZeroconf,
|
||||
) -> dict[str, Any] | None:
|
||||
@@ -57,13 +62,15 @@ def create_credentials_and_validate(
|
||||
result = helper.register(host, "HomeAssistant")
|
||||
|
||||
if result is not None:
|
||||
write_tls_asset(hass, CONF_SHC_CERT, result["cert"])
|
||||
write_tls_asset(hass, CONF_SHC_KEY, result["key"])
|
||||
# Save the key/certificate pair for each registered host separately,
|
||||
# otherwise only the last registered host is accessible.
|
||||
write_tls_asset(hass, unique_id, CONF_SHC_CERT, result["cert"])
|
||||
write_tls_asset(hass, unique_id, CONF_SHC_KEY, result["key"])
|
||||
|
||||
session = SHCSession(
|
||||
host,
|
||||
hass.config.path(DOMAIN, CONF_SHC_CERT),
|
||||
hass.config.path(DOMAIN, CONF_SHC_KEY),
|
||||
hass.config.path(DOMAIN, unique_id, CONF_SHC_CERT),
|
||||
hass.config.path(DOMAIN, unique_id, CONF_SHC_KEY),
|
||||
True,
|
||||
zeroconf_instance,
|
||||
)
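With the extra folder argument, every registered controller keeps its own key/certificate pair on disk. A hedged sketch of the resulting layout, assuming a made-up unique_id and that CONF_SHC_CERT / CONF_SHC_KEY hold the certificate and key file names:

# Hypothetical unique_id; the file names come from CONF_SHC_CERT / CONF_SHC_KEY.
unique_id = "shc123456"
cert_path = hass.config.path(DOMAIN, unique_id, CONF_SHC_CERT)
key_path = hass.config.path(DOMAIN, unique_id, CONF_SHC_KEY)
# e.g. <config>/bosch_shc/shc123456/<cert file> and <config>/bosch_shc/shc123456/<key file>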
|
||||
@@ -143,11 +150,16 @@ class BoschSHCConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
zeroconf_instance = await zeroconf.async_get_instance(self.hass)
|
||||
# unique_id uniquely identifies the registered controller and is used
|
||||
# to save the key/certificate pair for each controller separately
|
||||
unique_id = self.info["unique_id"]
|
||||
assert unique_id
|
||||
try:
|
||||
result = await self.hass.async_add_executor_job(
|
||||
create_credentials_and_validate,
|
||||
self.hass,
|
||||
self.host,
|
||||
unique_id,
|
||||
user_input,
|
||||
zeroconf_instance,
|
||||
)
|
||||
@@ -167,13 +179,18 @@ class BoschSHCConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
else:
|
||||
assert result
|
||||
entry_data = {
|
||||
CONF_SSL_CERTIFICATE: self.hass.config.path(DOMAIN, CONF_SHC_CERT),
|
||||
CONF_SSL_KEY: self.hass.config.path(DOMAIN, CONF_SHC_KEY),
|
||||
# Each host has its own key/certificate pair
|
||||
CONF_SSL_CERTIFICATE: self.hass.config.path(
|
||||
DOMAIN, unique_id, CONF_SHC_CERT
|
||||
),
|
||||
CONF_SSL_KEY: self.hass.config.path(
|
||||
DOMAIN, unique_id, CONF_SHC_KEY
|
||||
),
|
||||
CONF_HOST: self.host,
|
||||
CONF_TOKEN: result["token"],
|
||||
CONF_HOSTNAME: result["token"].split(":", 1)[1],
|
||||
}
|
||||
existing_entry = await self.async_set_unique_id(self.info["unique_id"])
|
||||
existing_entry = await self.async_set_unique_id(unique_id)
|
||||
if existing_entry:
|
||||
return self.async_update_reload_and_abort(
|
||||
existing_entry,
|
||||
|
@@ -12,6 +12,13 @@
|
||||
},
|
||||
"list_language": {
|
||||
"default": "mdi:earth"
|
||||
},
|
||||
"list_access": {
|
||||
"default": "mdi:account-lock",
|
||||
"state": {
|
||||
"shared": "mdi:account-group",
|
||||
"invitation": "mdi:account-multiple-plus"
|
||||
}
|
||||
}
|
||||
},
|
||||
"todo": {
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/bring",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["bring-api==0.9.0"]
|
||||
"requirements": ["bring-api==0.9.1"]
|
||||
}
|
||||
|
@@ -40,6 +40,7 @@ class BringSensor(StrEnum):
|
||||
CONVENIENT = "convenient"
|
||||
DISCOUNTED = "discounted"
|
||||
LIST_LANGUAGE = "list_language"
|
||||
LIST_ACCESS = "list_access"
|
||||
|
||||
|
||||
SENSOR_DESCRIPTIONS: tuple[BringSensorEntityDescription, ...] = (
|
||||
@@ -73,6 +74,14 @@ SENSOR_DESCRIPTIONS: tuple[BringSensorEntityDescription, ...] = (
|
||||
options=[x.lower() for x in BRING_SUPPORTED_LOCALES],
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
),
|
||||
BringSensorEntityDescription(
|
||||
key=BringSensor.LIST_ACCESS,
|
||||
translation_key=BringSensor.LIST_ACCESS,
|
||||
value_fn=lambda lst, _: lst["status"].lower(),
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
options=["registered", "shared", "invitation"],
|
||||
device_class=SensorDeviceClass.ENUM,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
@@ -61,6 +61,14 @@
|
||||
"sv-se": "Sweden",
|
||||
"tr-tr": "Türkiye"
|
||||
}
|
||||
},
|
||||
"list_access": {
|
||||
"name": "List access",
|
||||
"state": {
|
||||
"registered": "Private",
|
||||
"shared": "Shared",
|
||||
"invitation": "Invitation pending"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@@ -15,7 +15,7 @@ from broadlink.exceptions import (
|
||||
)
|
||||
from typing_extensions import TypeVar
|
||||
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntry
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
CONF_MAC,
|
||||
@@ -200,10 +200,4 @@ class BroadlinkDevice(Generic[_ApiT]):
|
||||
self.api.host[0],
|
||||
)
|
||||
|
||||
self.hass.async_create_task(
|
||||
self.hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_REAUTH},
|
||||
data={CONF_NAME: self.name, **self.config.data},
|
||||
)
|
||||
)
|
||||
self.config.async_start_reauth(self.hass, data={CONF_NAME: self.name})
|
||||
|
@@ -2,79 +2,22 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncio import timeout
|
||||
import logging
|
||||
|
||||
from aiohttp.client_exceptions import ClientResponseError, ServerDisconnectedError
|
||||
from brunt import BruntClientAsync, Thing
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DATA_BAPI, DATA_COOR, DOMAIN, PLATFORMS, REGULAR_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
from .const import PLATFORMS
|
||||
from .coordinator import BruntConfigEntry, BruntCoordinator
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: BruntConfigEntry) -> bool:
|
||||
"""Set up Brunt using config flow."""
|
||||
session = async_get_clientsession(hass)
|
||||
bapi = BruntClientAsync(
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=session,
|
||||
)
|
||||
try:
|
||||
await bapi.async_login()
|
||||
except ServerDisconnectedError as exc:
|
||||
raise ConfigEntryNotReady("Brunt not ready to connect.") from exc
|
||||
except ClientResponseError as exc:
|
||||
raise ConfigEntryAuthFailed(
|
||||
f"Brunt could not connect with username: {entry.data[CONF_USERNAME]}."
|
||||
) from exc
|
||||
|
||||
async def async_update_data() -> dict[str | None, Thing]:
|
||||
"""Fetch data from the Brunt endpoint for all Things.
|
||||
|
||||
Error 403 is the API response for any kind of authentication error (failed password or email).
|
||||
Error 401 is the API response for things that are not part of the account; this can happen when a device is deleted from the account.
|
||||
"""
|
||||
try:
|
||||
async with timeout(10):
|
||||
things = await bapi.async_get_things(force=True)
|
||||
return {thing.serial: thing for thing in things}
|
||||
except ServerDisconnectedError as err:
|
||||
raise UpdateFailed(f"Error communicating with API: {err}") from err
|
||||
except ClientResponseError as err:
|
||||
if err.status == 403:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
if err.status == 401:
|
||||
_LOGGER.warning("Device not found, will reload Brunt integration")
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
raise UpdateFailed from err
|
||||
|
||||
coordinator = DataUpdateCoordinator(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name="brunt",
|
||||
update_method=async_update_data,
|
||||
update_interval=REGULAR_INTERVAL,
|
||||
)
|
||||
coordinator = BruntCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
hass.data.setdefault(DOMAIN, {})
|
||||
hass.data[DOMAIN][entry.entry_id] = {DATA_BAPI: bapi, DATA_COOR: coordinator}
|
||||
entry.runtime_data = coordinator
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: BruntConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
if unload_ok:
|
||||
hass.data[DOMAIN].pop(entry.entry_id)
|
||||
return unload_ok
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
@@ -10,8 +10,6 @@ NOTIFICATION_ID = "brunt_notification"
|
||||
NOTIFICATION_TITLE = "Brunt Cover Setup"
|
||||
ATTRIBUTION = "Based on an unofficial Brunt SDK."
|
||||
PLATFORMS = [Platform.COVER]
|
||||
DATA_BAPI = "bapi"
|
||||
DATA_COOR = "coordinator"
|
||||
|
||||
CLOSED_POSITION = 0
|
||||
OPEN_POSITION = 100
|
||||
|
homeassistant/components/brunt/coordinator.py (new file, 80 lines added)
@@ -0,0 +1,80 @@
|
||||
"""The brunt component."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncio import timeout
|
||||
import logging
|
||||
|
||||
from aiohttp.client_exceptions import ClientResponseError, ServerDisconnectedError
|
||||
from brunt import BruntClientAsync, Thing
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import REGULAR_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type BruntConfigEntry = ConfigEntry[BruntCoordinator]
|
||||
|
||||
|
||||
class BruntCoordinator(DataUpdateCoordinator[dict[str | None, Thing]]):
|
||||
"""Config entry data."""
|
||||
|
||||
bapi: BruntClientAsync
|
||||
config_entry: BruntConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: BruntConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize the Brunt coordinator."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name="brunt",
|
||||
update_interval=REGULAR_INTERVAL,
|
||||
)
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
session = async_get_clientsession(self.hass)
|
||||
|
||||
self.bapi = BruntClientAsync(
|
||||
username=self.config_entry.data[CONF_USERNAME],
|
||||
password=self.config_entry.data[CONF_PASSWORD],
|
||||
session=session,
|
||||
)
|
||||
try:
|
||||
await self.bapi.async_login()
|
||||
except ServerDisconnectedError as exc:
|
||||
raise ConfigEntryNotReady("Brunt not ready to connect.") from exc
|
||||
except ClientResponseError as exc:
|
||||
raise ConfigEntryAuthFailed(
|
||||
f"Brunt could not connect with username: {self.config_entry.data[CONF_USERNAME]}."
|
||||
) from exc
|
||||
|
||||
async def _async_update_data(self) -> dict[str | None, Thing]:
|
||||
"""Fetch data from the Brunt endpoint for all Things.
|
||||
|
||||
Error 403 is the API response for any kind of authentication error (failed password or email).
|
||||
Error 401 is the API response for things that are not part of the account; this can happen when a device is deleted from the account.
|
||||
"""
|
||||
try:
|
||||
async with timeout(10):
|
||||
things = await self.bapi.async_get_things(force=True)
|
||||
return {thing.serial: thing for thing in things}
|
||||
except ServerDisconnectedError as err:
|
||||
raise UpdateFailed(f"Error communicating with API: {err}") from err
|
||||
except ClientResponseError as err:
|
||||
if err.status == 403:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
if err.status == 401:
|
||||
_LOGGER.warning("Device not found, will reload Brunt integration")
|
||||
await self.hass.config_entries.async_reload(self.config_entry.entry_id)
|
||||
raise UpdateFailed from err
|
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
from typing import Any
|
||||
|
||||
from aiohttp.client_exceptions import ClientResponseError
|
||||
from brunt import BruntClientAsync, Thing
|
||||
from brunt import Thing
|
||||
|
||||
from homeassistant.components.cover import (
|
||||
ATTR_POSITION,
|
||||
@@ -13,49 +13,39 @@ from homeassistant.components.cover import (
|
||||
CoverEntity,
|
||||
CoverEntityFeature,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.update_coordinator import (
|
||||
CoordinatorEntity,
|
||||
DataUpdateCoordinator,
|
||||
)
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import (
|
||||
ATTR_REQUEST_POSITION,
|
||||
ATTRIBUTION,
|
||||
CLOSED_POSITION,
|
||||
DATA_BAPI,
|
||||
DATA_COOR,
|
||||
DOMAIN,
|
||||
FAST_INTERVAL,
|
||||
OPEN_POSITION,
|
||||
REGULAR_INTERVAL,
|
||||
)
|
||||
from .coordinator import BruntConfigEntry, BruntCoordinator
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: ConfigEntry,
|
||||
entry: BruntConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the brunt platform."""
|
||||
bapi: BruntClientAsync = hass.data[DOMAIN][entry.entry_id][DATA_BAPI]
|
||||
coordinator: DataUpdateCoordinator[dict[str | None, Thing]] = hass.data[DOMAIN][
|
||||
entry.entry_id
|
||||
][DATA_COOR]
|
||||
coordinator = entry.runtime_data
|
||||
|
||||
async_add_entities(
|
||||
BruntDevice(coordinator, serial, thing, bapi, entry.entry_id)
|
||||
BruntDevice(coordinator, serial, thing, entry.entry_id)
|
||||
for serial, thing in coordinator.data.items()
|
||||
)
|
||||
|
||||
|
||||
class BruntDevice(
|
||||
CoordinatorEntity[DataUpdateCoordinator[dict[str | None, Thing]]], CoverEntity
|
||||
):
|
||||
class BruntDevice(CoordinatorEntity[BruntCoordinator], CoverEntity):
|
||||
"""Representation of a Brunt cover device.
|
||||
|
||||
Contains the common logic for all Brunt devices.
|
||||
@@ -73,16 +63,14 @@ class BruntDevice(
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: DataUpdateCoordinator[dict[str | None, Thing]],
|
||||
coordinator: BruntCoordinator,
|
||||
serial: str | None,
|
||||
thing: Thing,
|
||||
bapi: BruntClientAsync,
|
||||
entry_id: str,
|
||||
) -> None:
|
||||
"""Init the Brunt device."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = serial
|
||||
self._bapi = bapi
|
||||
self._thing = thing
|
||||
self._entry_id = entry_id
|
||||
|
||||
@@ -167,7 +155,7 @@ class BruntDevice(
|
||||
async def _async_update_cover(self, position: int) -> None:
|
||||
"""Set the cover to the new position and wait for the update to be reflected."""
|
||||
try:
|
||||
await self._bapi.async_change_request_position(
|
||||
await self.coordinator.bapi.async_change_request_position(
|
||||
position, thing_uri=self._thing.thing_uri
|
||||
)
|
||||
except ClientResponseError as exc:
|
||||
@@ -182,7 +170,7 @@ class BruntDevice(
|
||||
"""Update the update interval after each refresh."""
|
||||
if (
|
||||
self.request_cover_position
|
||||
== self._bapi.last_requested_positions[self._thing.thing_uri]
|
||||
== self.coordinator.bapi.last_requested_positions[self._thing.thing_uri]
|
||||
and self.move_state == 0
|
||||
):
|
||||
self.coordinator.update_interval = REGULAR_INTERVAL
|
||||
|
@@ -7,5 +7,5 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bsblan"],
|
||||
"requirements": ["python-bsblan==0.6.4"]
|
||||
"requirements": ["python-bsblan==1.2.1"]
|
||||
}
|
||||
|
@@ -364,7 +364,7 @@ SENSOR_DESCRIPTIONS = {
|
||||
): SensorEntityDescription(
|
||||
key=f"{BTHomeSensorDeviceClass.CONDUCTIVITY}_{Units.CONDUCTIVITY}",
|
||||
device_class=SensorDeviceClass.CONDUCTIVITY,
|
||||
native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS,
|
||||
native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS_PER_CM,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
}
|
||||
|
@@ -109,6 +109,7 @@ async def async_setup_platform(
|
||||
entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass)
|
||||
coordinator = CalDavUpdateCoordinator(
|
||||
hass,
|
||||
None,
|
||||
calendar=calendar,
|
||||
days=days,
|
||||
include_all_day=True,
|
||||
@@ -126,6 +127,7 @@ async def async_setup_platform(
|
||||
entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass)
|
||||
coordinator = CalDavUpdateCoordinator(
|
||||
hass,
|
||||
None,
|
||||
calendar=calendar,
|
||||
days=days,
|
||||
include_all_day=False,
|
||||
@@ -152,6 +154,7 @@ async def async_setup_entry(
|
||||
async_generate_entity_id(ENTITY_ID_FORMAT, calendar.name, hass=hass),
|
||||
CalDavUpdateCoordinator(
|
||||
hass,
|
||||
entry,
|
||||
calendar=calendar,
|
||||
days=CONFIG_ENTRY_DEFAULT_DAYS,
|
||||
include_all_day=True,
|
||||
@@ -204,7 +207,8 @@ class WebDavCalendarEntity(CoordinatorEntity[CalDavUpdateCoordinator], CalendarE
|
||||
if self._supports_offset:
|
||||
self._attr_extra_state_attributes = {
|
||||
"offset_reached": is_offset_reached(
|
||||
self._event.start_datetime_local, self.coordinator.offset
|
||||
self._event.start_datetime_local,
|
||||
self.coordinator.offset, # type: ignore[arg-type]
|
||||
)
|
||||
if self._event
|
||||
else False
|
||||
|
@@ -6,6 +6,9 @@ from datetime import date, datetime, time, timedelta
|
||||
from functools import partial
|
||||
import logging
|
||||
import re
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import caldav
|
||||
|
||||
from homeassistant.components.calendar import CalendarEvent, extract_offset
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -14,6 +17,9 @@ from homeassistant.util import dt as dt_util
|
||||
|
||||
from .api import get_attr_value
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import CalDavConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15)
|
||||
@@ -23,11 +29,20 @@ OFFSET = "!!"
|
||||
class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]):
|
||||
"""Class to utilize the calendar dav client object to get next event."""
|
||||
|
||||
def __init__(self, hass, calendar, days, include_all_day, search):
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: CalDavConfigEntry | None,
|
||||
calendar: caldav.Calendar,
|
||||
days: int,
|
||||
include_all_day: bool,
|
||||
search: str | None,
|
||||
) -> None:
|
||||
"""Set up how we are going to search the WebDav calendar."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=f"CalDAV {calendar.name}",
|
||||
update_interval=MIN_TIME_BETWEEN_UPDATES,
|
||||
)
|
||||
@@ -35,7 +50,7 @@ class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]):
|
||||
self.days = days
|
||||
self.include_all_day = include_all_day
|
||||
self.search = search
|
||||
self.offset = None
|
||||
self.offset: timedelta | None = None
|
||||
|
||||
async def async_get_events(
|
||||
self, hass: HomeAssistant, start_date: datetime, end_date: datetime
|
||||
@@ -109,7 +124,7 @@ class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]):
|
||||
_start_of_tomorrow = start_of_tomorrow
|
||||
if _start_of_today <= start_dt < _start_of_tomorrow:
|
||||
new_event = event.copy()
|
||||
new_vevent = new_event.instance.vevent
|
||||
new_vevent = new_event.instance.vevent # type: ignore[attr-defined]
|
||||
if hasattr(new_vevent, "dtend"):
|
||||
dur = new_vevent.dtend.value - new_vevent.dtstart.value
|
||||
new_vevent.dtend.value = start_dt + dur
|
||||
|
@@ -14,9 +14,6 @@
|
||||
},
|
||||
"get_events": {
|
||||
"service": "mdi:calendar-month"
|
||||
},
|
||||
"list_events": {
|
||||
"service": "mdi:calendar-month"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -36,22 +36,6 @@ create_event:
|
||||
example: "Conference Room - F123, Bldg. 002"
|
||||
selector:
|
||||
text:
|
||||
list_events:
|
||||
target:
|
||||
entity:
|
||||
domain: calendar
|
||||
fields:
|
||||
start_date_time:
|
||||
example: "2022-03-22 20:00:00"
|
||||
selector:
|
||||
datetime:
|
||||
end_date_time:
|
||||
example: "2022-03-22 22:00:00"
|
||||
selector:
|
||||
datetime:
|
||||
duration:
|
||||
selector:
|
||||
duration:
|
||||
get_events:
|
||||
target:
|
||||
entity:
|
||||
|
@@ -89,24 +89,6 @@
|
||||
"description": "Returns active events from start_date_time until the specified duration."
|
||||
}
|
||||
}
|
||||
},
|
||||
"list_events": {
|
||||
"name": "List event",
|
||||
"description": "Lists events on a calendar within a time range.",
|
||||
"fields": {
|
||||
"start_date_time": {
|
||||
"name": "[%key:component::calendar::services::get_events::fields::start_date_time::name%]",
|
||||
"description": "[%key:component::calendar::services::get_events::fields::start_date_time::description%]"
|
||||
},
|
||||
"end_date_time": {
|
||||
"name": "[%key:component::calendar::services::get_events::fields::end_date_time::name%]",
|
||||
"description": "[%key:component::calendar::services::get_events::fields::end_date_time::description%]"
|
||||
},
|
||||
"duration": {
|
||||
"name": "[%key:component::calendar::services::get_events::fields::duration::name%]",
|
||||
"description": "[%key:component::calendar::services::get_events::fields::duration::description%]"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
|
@@ -13,7 +13,7 @@ from homeassistant.const import CONF_HOST, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
|
||||
from .const import CONNECT_TIMEOUT, STREAM_MAGIC_EXCEPTIONS
|
||||
from .const import CONNECT_TIMEOUT, DOMAIN, STREAM_MAGIC_EXCEPTIONS
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.MEDIA_PLAYER, Platform.SELECT, Platform.SWITCH]
|
||||
|
||||
@@ -45,7 +45,13 @@ async def async_setup_entry(
|
||||
async with asyncio.timeout(CONNECT_TIMEOUT):
|
||||
await client.connect()
|
||||
except STREAM_MAGIC_EXCEPTIONS as err:
|
||||
raise ConfigEntryNotReady(f"Error while connecting to {client.host}") from err
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_cannot_connect",
|
||||
translation_placeholders={
|
||||
"host": client.host,
|
||||
},
|
||||
) from err
|
||||
entry.runtime_data = client
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
@@ -2,20 +2,22 @@
|
||||
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.const import CONF_HOST
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.redact import async_redact_data
|
||||
|
||||
from . import CambridgeAudioConfigEntry
|
||||
|
||||
TO_REDACT = {CONF_HOST}
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
hass: HomeAssistant, entry: CambridgeAudioConfigEntry
|
||||
) -> dict[str, Any]:
|
||||
"""Return diagnostics for the provided config entry."""
|
||||
client = entry.runtime_data
|
||||
return async_redact_data(
|
||||
{"info": client.info, "sources": client.sources}, TO_REDACT
|
||||
)
|
||||
return {
|
||||
"display": client.display.to_dict(),
|
||||
"info": client.info.to_dict(),
|
||||
"now_playing": client.now_playing.to_dict(),
|
||||
"play_state": client.play_state.to_dict(),
|
||||
"presets_list": client.preset_list.to_dict(),
|
||||
"sources": [s.to_dict() for s in client.sources],
|
||||
"update": client.update.to_dict(),
|
||||
}
|
||||
|
@@ -26,7 +26,12 @@ def command[_EntityT: CambridgeAudioEntity, **_P](
|
||||
await func(self, *args, **kwargs)
|
||||
except STREAM_MAGIC_EXCEPTIONS as exc:
|
||||
raise HomeAssistantError(
|
||||
f"Error executing {func.__name__} on entity {self.entity_id},"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_error",
|
||||
translation_placeholders={
|
||||
"function_name": func.__name__,
|
||||
"entity_id": self.entity_id,
|
||||
},
|
||||
) from exc
|
||||
|
||||
return decorator
|
||||
@@ -62,4 +67,4 @@ class CambridgeAudioEntity(Entity):
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Remove callbacks."""
|
||||
await self.client.unregister_state_update_callbacks(self._state_update_callback)
|
||||
self.client.unregister_state_update_callbacks(self._state_update_callback)
|
||||
|
@@ -8,6 +8,9 @@
|
||||
"dim": "mdi:brightness-6",
|
||||
"off": "mdi:brightness-3"
|
||||
}
|
||||
},
|
||||
"audio_output": {
|
||||
"default": "mdi:audio-input-stereo-minijack"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
|
@@ -7,6 +7,6 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["aiostreammagic"],
|
||||
"requirements": ["aiostreammagic==2.8.1"],
|
||||
"requirements": ["aiostreammagic==2.8.4"],
|
||||
"zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."]
|
||||
}
|
||||
|
@@ -177,12 +177,9 @@ class CambridgeAudioDevice(CambridgeAudioEntity, MediaPlayerEntity):
        return volume / 100

    @property
    def shuffle(self) -> bool | None:
    def shuffle(self) -> bool:
        """Current shuffle configuration."""
        mode_shuffle = self.client.play_state.mode_shuffle
        if not mode_shuffle:
            return False
        return mode_shuffle != ShuffleMode.OFF
        return self.client.play_state.mode_shuffle != ShuffleMode.OFF

    @property
    def repeat(self) -> RepeatMode | None:
@@ -1,7 +1,7 @@
"""Support for Cambridge Audio select entities."""

from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from dataclasses import dataclass, field

from aiostreammagic import StreamMagicClient
from aiostreammagic.models import DisplayBrightness

@@ -19,10 +19,34 @@ from .entity import CambridgeAudioEntity
class CambridgeAudioSelectEntityDescription(SelectEntityDescription):
    """Describes Cambridge Audio select entity."""

    options_fn: Callable[[StreamMagicClient], list[str]] = field(default=lambda _: [])
    load_fn: Callable[[StreamMagicClient], bool] = field(default=lambda _: True)
    value_fn: Callable[[StreamMagicClient], str | None]
    set_value_fn: Callable[[StreamMagicClient, str], Awaitable[None]]


async def _audio_output_set_value_fn(client: StreamMagicClient, value: str) -> None:
    """Set the audio output using the display name."""
    audio_output_id = next(
        (output.id for output in client.audio_output.outputs if value == output.name),
        None,
    )
    assert audio_output_id is not None
    await client.set_audio_output(audio_output_id)


def _audio_output_value_fn(client: StreamMagicClient) -> str | None:
    """Convert the current audio output id to name."""
    return next(
        (
            output.name
            for output in client.audio_output.outputs
            if client.state.audio_output == output.id
        ),
        None,
    )


CONTROL_ENTITIES: tuple[CambridgeAudioSelectEntityDescription, ...] = (
    CambridgeAudioSelectEntityDescription(
        key="display_brightness",

@@ -34,6 +58,17 @@ CONTROL_ENTITIES: tuple[CambridgeAudioSelectEntityDescription, ...] = (
            DisplayBrightness(value)
        ),
    ),
    CambridgeAudioSelectEntityDescription(
        key="audio_output",
        translation_key="audio_output",
        entity_category=EntityCategory.CONFIG,
        options_fn=lambda client: [
            output.name for output in client.audio_output.outputs
        ],
        load_fn=lambda client: len(client.audio_output.outputs) > 0,
        value_fn=_audio_output_value_fn,
        set_value_fn=_audio_output_set_value_fn,
    ),
)


@@ -46,7 +81,9 @@ async def async_setup_entry(

    client: StreamMagicClient = entry.runtime_data
    entities: list[CambridgeAudioSelect] = [
        CambridgeAudioSelect(client, description) for description in CONTROL_ENTITIES
        CambridgeAudioSelect(client, description)
        for description in CONTROL_ENTITIES
        if description.load_fn(client)
    ]
    async_add_entities(entities)

@@ -65,6 +102,9 @@ class CambridgeAudioSelect(CambridgeAudioEntity, SelectEntity):
        super().__init__(client)
        self.entity_description = description
        self._attr_unique_id = f"{client.info.unit_id}-{description.key}"
        options_fn = description.options_fn(client)
        if options_fn:
            self._attr_options = options_fn

    @property
    def current_option(self) -> str | None:
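The new options_fn and load_fn hooks let the platform decide, per device, which select entities to create and which options they expose. A small, self-contained sketch of that description-driven filtering, using fake client classes in place of StreamMagicClient:

    from collections.abc import Callable
    from dataclasses import dataclass, field


    @dataclass(frozen=True, kw_only=True)
    class SelectDescription:
        """Slimmed-down stand-in for CambridgeAudioSelectEntityDescription."""

        key: str
        options_fn: Callable[[object], list[str]] = field(default=lambda _: [])
        load_fn: Callable[[object], bool] = field(default=lambda _: True)


    @dataclass
    class FakeOutputs:
        outputs: list[str]


    @dataclass
    class FakeClient:
        audio_output: FakeOutputs


    DESCRIPTIONS = (
        SelectDescription(key="display_brightness"),
        SelectDescription(
            key="audio_output",
            options_fn=lambda client: list(client.audio_output.outputs),
            load_fn=lambda client: len(client.audio_output.outputs) > 0,
        ),
    )

    # A device that reports no audio outputs only gets the brightness select.
    client = FakeClient(audio_output=FakeOutputs(outputs=[]))
    created = [d.key for d in DESCRIPTIONS if d.load_fn(client)]
    assert created == ["display_brightness"]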
@@ -32,6 +32,9 @@
          "dim": "Dim",
          "off": "[%key:common::state::off%]"
        }
      },
      "audio_output": {
        "name": "Audio output"
      }
    },
    "switch": {

@@ -52,6 +55,12 @@
    },
    "preset_non_integer": {
      "message": "Preset must be an integer, got: {preset_id}"
    },
    "entry_cannot_connect": {
      "message": "Error while connecting to {host}"
    },
    "command_error": {
      "message": "Error executing {function_name} on entity {entity_id}"
    }
  }
}
@@ -4,9 +4,9 @@ from __future__ import annotations

import asyncio
import collections
from collections.abc import Awaitable, Callable
from collections.abc import Awaitable, Callable, Coroutine
from contextlib import suppress
from dataclasses import asdict
from dataclasses import asdict, dataclass
from datetime import datetime, timedelta
from enum import IntFlag
from functools import partial

@@ -18,9 +18,9 @@ from typing import Any, Final, final

from aiohttp import hdrs, web
import attr
from propcache import cached_property
from propcache import cached_property, under_cached_property
import voluptuous as vol
from webrtc_models import RTCIceServer
from webrtc_models import RTCIceCandidate, RTCIceServer

from homeassistant.components import websocket_api
from homeassistant.components.http import KEY_AUTHENTICATED, HomeAssistantView

@@ -86,12 +86,20 @@ from .img_util import scale_jpeg_camera_image
from .prefs import CameraPreferences, DynamicStreamSettings  # noqa: F401
from .webrtc import (
    DATA_ICE_SERVERS,
    CameraWebRTCLegacyProvider,
    CameraWebRTCProvider,
    WebRTCAnswer,
    WebRTCCandidate,  # noqa: F401
    WebRTCClientConfiguration,
    async_get_supported_providers,
    WebRTCError,
    WebRTCMessage,  # noqa: F401
    WebRTCSendMessage,
    async_get_supported_legacy_provider,
    async_get_supported_provider,
    async_register_ice_servers,
    async_register_rtsp_to_web_rtc_provider,  # noqa: F401
    ws_get_client_config,
    async_register_webrtc_provider,  # noqa: F401
    async_register_ws,
)

_LOGGER = logging.getLogger(__name__)
@@ -169,6 +177,13 @@ class Image:
    content: bytes = attr.ib()


@dataclass(frozen=True)
class CameraCapabilities:
    """Camera capabilities."""

    frontend_stream_types: set[StreamType]


@bind_hass
async def async_request_stream(hass: HomeAssistant, entity_id: str, fmt: str) -> str:
    """Request a stream for a camera entity."""
@@ -342,10 +357,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    hass.http.register_view(CameraMjpegStream(component))

    websocket_api.async_register_command(hass, ws_camera_stream)
    websocket_api.async_register_command(hass, ws_camera_web_rtc_offer)
    websocket_api.async_register_command(hass, websocket_get_prefs)
    websocket_api.async_register_command(hass, websocket_update_prefs)
    websocket_api.async_register_command(hass, ws_get_client_config)
    websocket_api.async_register_command(hass, ws_camera_capabilities)
    async_register_ws(hass)

    await component.async_setup(config)
@@ -405,7 +420,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    def get_ice_servers() -> list[RTCIceServer]:
        if hass.config.webrtc.ice_servers:
            return hass.config.webrtc.ice_servers
        return [RTCIceServer(urls="stun:stun.home-assistant.io:80")]
        return [
            RTCIceServer(
                urls=[
                    "stun:stun.home-assistant.io:80",
                    "stun:stun.home-assistant.io:3478",
                ]
            ),
        ]

    async_register_ice_servers(hass, get_ice_servers)
    return True
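The default STUN configuration now lists both port 80 and 3478 in a single RTCIceServer entry, and custom providers can contribute their own servers through the same async_register_ice_servers hook used above. A hedged sketch of such a registration; the TURN server, credentials, and register() entry point are invented, and it assumes the helper is re-exported from homeassistant.components.camera as the import block above suggests:

    from homeassistant.components.camera import async_register_ice_servers
    from homeassistant.core import HomeAssistant, callback
    from webrtc_models import RTCIceServer


    @callback
    def _get_ice_servers() -> list[RTCIceServer]:
        """Return ICE servers for WebRTC clients (example TURN entry is made up)."""
        return [
            RTCIceServer(urls=["stun:stun.home-assistant.io:80"]),
            RTCIceServer(
                urls=["turn:turn.example.com:3478"],
                username="ha-user",
                credential="not-a-real-secret",
            ),
        ]


    def register(hass: HomeAssistant) -> None:
        """Register the callback; it is queried when the client configuration is built."""
        async_register_ice_servers(hass, _get_ice_servers)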
@@ -454,8 +476,11 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
    _attr_state: None = None  # State is determined by is_on
    _attr_supported_features: CameraEntityFeature = CameraEntityFeature(0)

    __supports_stream: CameraEntityFeature | None = None

    def __init__(self) -> None:
        """Initialize a camera."""
        self._cache: dict[str, Any] = {}
        self.stream: Stream | None = None
        self.stream_options: dict[str, str | bool | float] = {}
        self.content_type: str = DEFAULT_CONTENT_TYPE

@@ -463,7 +488,15 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
        self._warned_old_signature = False
        self.async_update_token()
        self._create_stream_lock: asyncio.Lock | None = None
        self._webrtc_providers: list[CameraWebRTCProvider] = []
        self._webrtc_provider: CameraWebRTCProvider | None = None
        self._legacy_webrtc_provider: CameraWebRTCLegacyProvider | None = None
        self._supports_native_sync_webrtc = (
            type(self).async_handle_web_rtc_offer != Camera.async_handle_web_rtc_offer
        )
        self._supports_native_async_webrtc = (
            type(self).async_handle_async_webrtc_offer
            != Camera.async_handle_async_webrtc_offer
        )

    @cached_property
    def entity_picture(self) -> str:
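The constructor now detects native WebRTC support by comparing the method object looked up on the subclass against the base class implementation: any override makes the comparison unequal. A tiny standalone illustration of that Python technique, with invented class names:

    class Base:
        async def handle_offer(self, offer: str) -> str | None:
            """Default implementation."""
            return None


    class WithNative(Base):
        async def handle_offer(self, offer: str) -> str | None:
            return "answer-sdp"


    class WithoutNative(Base):
        pass


    def overrides_handle_offer(obj: Base) -> bool:
        # If the subclass overrides the method, the function found on the
        # subclass is a different object than the one defined on Base.
        return type(obj).handle_offer is not Base.handle_offer


    assert overrides_handle_offer(WithNative()) is True
    assert overrides_handle_offer(WithoutNative()) is False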
@@ -537,7 +570,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
            return self._attr_frontend_stream_type
        if CameraEntityFeature.STREAM not in self.supported_features_compat:
            return None
        if self._webrtc_providers:
        if self._webrtc_provider or self._legacy_webrtc_provider:
            return StreamType.WEB_RTC
        return StreamType.HLS
@@ -587,12 +620,66 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):

        Integrations can override with a native WebRTC implementation.
        """
        for provider in self._webrtc_providers:
            if answer := await provider.async_handle_web_rtc_offer(self, offer_sdp):
                return answer
        raise HomeAssistantError(
            "WebRTC offer was not accepted by the supported providers"
        )

    async def async_handle_async_webrtc_offer(
        self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage
    ) -> None:
        """Handle the async WebRTC offer.

        Async means that it could take some time to process the offer and responses/message
        will be sent with the send_message callback.
        This method is used by cameras with CameraEntityFeature.STREAM and StreamType.WEB_RTC.
        An integration overriding this method must also implement async_on_webrtc_candidate.

        Integrations can override with a native WebRTC implementation.
        """
        if self._supports_native_sync_webrtc:
            try:
                answer = await self.async_handle_web_rtc_offer(offer_sdp)
            except ValueError as ex:
                _LOGGER.error("Error handling WebRTC offer: %s", ex)
                send_message(
                    WebRTCError(
                        "webrtc_offer_failed",
                        str(ex),
                    )
                )
            except TimeoutError:
                # This catch was already here and should stay through the deprecation
                _LOGGER.error("Timeout handling WebRTC offer")
                send_message(
                    WebRTCError(
                        "webrtc_offer_failed",
                        "Timeout handling WebRTC offer",
                    )
                )
            else:
                if answer:
                    send_message(WebRTCAnswer(answer))
                else:
                    _LOGGER.error("Error handling WebRTC offer: No answer")
                    send_message(
                        WebRTCError(
                            "webrtc_offer_failed",
                            "No answer on WebRTC offer",
                        )
                    )
            return

        if self._webrtc_provider:
            await self._webrtc_provider.async_handle_async_webrtc_offer(
                self, offer_sdp, session_id, send_message
            )
            return

        if self._legacy_webrtc_provider and (
            answer := await self._legacy_webrtc_provider.async_handle_web_rtc_offer(
                self, offer_sdp
            )
        ):
            send_message(WebRTCAnswer(answer))
        else:
            raise HomeAssistantError("Camera does not support WebRTC")

    def camera_image(
        self, width: int | None = None, height: int | None = None
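With the async offer path, a camera that speaks WebRTC natively overrides async_handle_async_webrtc_offer and pushes its answer (or an error) through send_message; it must also accept trickled candidates via async_on_webrtc_candidate. A hedged sketch of such an override; self._device and its methods are invented stand-ins for whatever client library an integration uses, and the imports assume the names shown in the import block above are re-exported from homeassistant.components.camera:

    from homeassistant.components.camera import (
        Camera,
        CameraEntityFeature,
        WebRTCAnswer,
        WebRTCError,
        WebRTCSendMessage,
    )
    from webrtc_models import RTCIceCandidate


    class ExampleNativeWebRTCCamera(Camera):
        """Sketch of a camera entity with a native async WebRTC implementation."""

        _attr_supported_features = CameraEntityFeature.STREAM

        async def async_handle_async_webrtc_offer(
            self, offer_sdp: str, session_id: str, send_message: WebRTCSendMessage
        ) -> None:
            # start_webrtc_session() is a hypothetical device call that returns an SDP answer.
            try:
                answer_sdp = await self._device.start_webrtc_session(session_id, offer_sdp)
            except TimeoutError:
                send_message(WebRTCError("webrtc_offer_failed", "Device did not answer"))
                return
            send_message(WebRTCAnswer(answer_sdp))

        async def async_on_webrtc_candidate(
            self, session_id: str, candidate: RTCIceCandidate
        ) -> None:
            # Forward ICE candidates from the frontend to the (hypothetical) device session.
            await self._device.add_ice_candidate(session_id, candidate)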
@@ -702,57 +789,133 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
    async def async_internal_added_to_hass(self) -> None:
        """Run when entity about to be added to hass."""
        await super().async_internal_added_to_hass()
        # Avoid calling async_refresh_providers() in here because it
        # it will write state a second time since state is always
        # written when an entity is added to hass.
        self._webrtc_providers = await self._async_get_supported_webrtc_providers()
        self.__supports_stream = (
            self.supported_features_compat & CameraEntityFeature.STREAM
        )
        await self.async_refresh_providers(write_state=False)

    async def async_refresh_providers(self) -> None:
    async def async_refresh_providers(self, *, write_state: bool = True) -> None:
        """Determine if any of the registered providers are suitable for this entity.

        This affects state attributes, so it should be invoked any time the registered
        providers or inputs to the state attributes change.

        Returns True if any state was updated (and needs to be written)
        """
        old_providers = self._webrtc_providers
        new_providers = await self._async_get_supported_webrtc_providers()
        self._webrtc_providers = new_providers
        if old_providers != new_providers:
            self.async_write_ha_state()
        old_provider = self._webrtc_provider
        old_legacy_provider = self._legacy_webrtc_provider
        new_provider = None
        new_legacy_provider = None

    async def _async_get_supported_webrtc_providers(
        self,
    ) -> list[CameraWebRTCProvider]:
        """Get the all providers that supports this camera."""
        # Skip all providers if the camera has a native WebRTC implementation
        if not (
            self._supports_native_sync_webrtc or self._supports_native_async_webrtc
        ):
            # Camera doesn't have a native WebRTC implementation
            new_provider = await self._async_get_supported_webrtc_provider(
                async_get_supported_provider
            )

            if new_provider is None:
                # Only add the legacy provider if the new provider is not available
                new_legacy_provider = await self._async_get_supported_webrtc_provider(
                    async_get_supported_legacy_provider
                )

        if old_provider != new_provider or old_legacy_provider != new_legacy_provider:
            self._webrtc_provider = new_provider
            self._legacy_webrtc_provider = new_legacy_provider
            self._invalidate_camera_capabilities_cache()
            if write_state:
                self.async_write_ha_state()

    async def _async_get_supported_webrtc_provider[_T](
        self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]]
    ) -> _T | None:
        """Get first provider that supports this camera."""
        if CameraEntityFeature.STREAM not in self.supported_features_compat:
            return []
            return None

        return await async_get_supported_providers(self.hass, self)
        return await fn(self.hass, self)
    @property
    def webrtc_providers(self) -> list[CameraWebRTCProvider]:
        """Return the WebRTC providers."""
        return self._webrtc_providers

    async def _async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration:
    @callback
    def _async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration:
        """Return the WebRTC client configuration adjustable per integration."""
        return WebRTCClientConfiguration()

    @final
    async def async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration:
    @callback
    def async_get_webrtc_client_configuration(self) -> WebRTCClientConfiguration:
        """Return the WebRTC client configuration and extend it with the registered ice servers."""
        config = await self._async_get_webrtc_client_configuration()
        config = self._async_get_webrtc_client_configuration()

        ice_servers = [
            server
            for servers in self.hass.data.get(DATA_ICE_SERVERS, [])
            for server in servers()
        ]
        config.configuration.ice_servers.extend(ice_servers)
        if not self._supports_native_sync_webrtc:
            # Until 2024.11, the frontend was not resolving any ice servers
            # The async approach was added 2024.11 and new integrations need to use it
            ice_servers = [
                server
                for servers in self.hass.data.get(DATA_ICE_SERVERS, [])
                for server in servers()
            ]
            config.configuration.ice_servers.extend(ice_servers)

        config.get_candidates_upfront = (
            self._supports_native_sync_webrtc
            or self._legacy_webrtc_provider is not None
        )

        return config
    async def async_on_webrtc_candidate(
        self, session_id: str, candidate: RTCIceCandidate
    ) -> None:
        """Handle a WebRTC candidate."""
        if self._webrtc_provider:
            await self._webrtc_provider.async_on_webrtc_candidate(session_id, candidate)
        else:
            raise HomeAssistantError("Cannot handle WebRTC candidate")

    @callback
    def close_webrtc_session(self, session_id: str) -> None:
        """Close a WebRTC session."""
        if self._webrtc_provider:
            self._webrtc_provider.async_close_session(session_id)

    @callback
    def _invalidate_camera_capabilities_cache(self) -> None:
        """Invalidate the camera capabilities cache."""
        self._cache.pop("camera_capabilities", None)

    @final
    @under_cached_property
    def camera_capabilities(self) -> CameraCapabilities:
        """Return the camera capabilities."""
        frontend_stream_types = set()
        if CameraEntityFeature.STREAM in self.supported_features_compat:
            if self._supports_native_sync_webrtc or self._supports_native_async_webrtc:
                # The camera has a native WebRTC implementation
                frontend_stream_types.add(StreamType.WEB_RTC)
            else:
                frontend_stream_types.add(StreamType.HLS)

                if self._webrtc_provider:
                    frontend_stream_types.add(StreamType.WEB_RTC)

        return CameraCapabilities(frontend_stream_types)
    @callback
    def async_write_ha_state(self) -> None:
        """Write the state to the state machine.

        Schedules async_refresh_providers if support of streams have changed.
        """
        super().async_write_ha_state()
        if self.__supports_stream != (
            supports_stream := self.supported_features_compat
            & CameraEntityFeature.STREAM
        ):
            self.__supports_stream = supports_stream
            self._invalidate_camera_capabilities_cache()
            self.hass.async_create_task(self.async_refresh_providers())


class CameraView(HomeAssistantView):
    """Base CameraView."""

@@ -843,6 +1006,24 @@ class CameraMjpegStream(CameraView):
            raise web.HTTPBadRequest from err

@websocket_api.websocket_command(
    {
        vol.Required("type"): "camera/capabilities",
        vol.Required("entity_id"): cv.entity_id,
    }
)
@websocket_api.async_response
async def ws_camera_capabilities(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Handle get camera capabilities websocket command.

    Async friendly.
    """
    camera = get_camera_from_entity_id(hass, msg["entity_id"])
    connection.send_result(msg["id"], asdict(camera.camera_capabilities))
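The new camera/capabilities command returns the CameraCapabilities dataclass serialized with asdict, so a frontend client learns which stream types it can render. A hedged sketch of the message exchange; the message id and entity id are illustrative, and the set of stream types is assumed to be JSON-encoded as a list by Home Assistant's serializer:

    # Request sent by a websocket client (id and entity_id are examples).
    request = {
        "id": 42,
        "type": "camera/capabilities",
        "entity_id": "camera.front_door",
    }

    # Expected result for an HLS-only camera, i.e.
    # CameraCapabilities(frontend_stream_types={StreamType.HLS}) after asdict().
    response = {
        "id": 42,
        "type": "result",
        "success": True,
        "result": {"frontend_stream_types": ["hls"]},
    }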
@websocket_api.websocket_command(
    {
        vol.Required("type"): "camera/stream",

@@ -873,53 +1054,6 @@ async def ws_camera_stream(
    )


@websocket_api.websocket_command(
    {
        vol.Required("type"): "camera/web_rtc_offer",
        vol.Required("entity_id"): cv.entity_id,
        vol.Required("offer"): str,
    }
)
@websocket_api.async_response
async def ws_camera_web_rtc_offer(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Handle the signal path for a WebRTC stream.

    This signal path is used to route the offer created by the client to the
    camera device through the integration for negotiation on initial setup,
    which returns an answer. The actual streaming is handled entirely between
    the client and camera device.

    Async friendly.
    """
    entity_id = msg["entity_id"]
    offer = msg["offer"]
    camera = get_camera_from_entity_id(hass, entity_id)
    if camera.frontend_stream_type != StreamType.WEB_RTC:
        connection.send_error(
            msg["id"],
            "web_rtc_offer_failed",
            (
                "Camera does not support WebRTC,"
                f" frontend_stream_type={camera.frontend_stream_type}"
            ),
        )
        return
    try:
        answer = await camera.async_handle_web_rtc_offer(offer)
    except (HomeAssistantError, ValueError) as ex:
        _LOGGER.error("Error handling WebRTC offer: %s", ex)
        connection.send_error(msg["id"], "web_rtc_offer_failed", str(ex))
    except TimeoutError:
        _LOGGER.error("Timeout handling WebRTC offer")
        connection.send_error(
            msg["id"], "web_rtc_offer_failed", "Timeout handling WebRTC offer"
        )
    else:
        connection.send_result(msg["id"], {"answer": answer})


@websocket_api.websocket_command(
    {vol.Required("type"): "camera/get_prefs", vol.Required("entity_id"): cv.entity_id}
)
Some files were not shown because too many files have changed in this diff.