Mirror of https://github.com/home-assistant/core.git (synced 2025-11-01 23:19:22 +00:00)

Compare commits: 2025.1.0b7...revert-134 (489 commits)
| Author | SHA1 | Date |
|---|---|---|

(Commit list placeholder: only the 489 abbreviated SHA1 values survived extraction, from aeebf67575 through 30d95f37d8; the author, date, and message columns were empty.)
.github/workflows/ci.yaml (2 changes)
@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 11
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 9
-  HA_SHORT_VERSION: "2025.1"
+  HA_SHORT_VERSION: "2025.2"
   DEFAULT_PYTHON: "3.12"
   ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
   # 10.3 is the oldest supported version
.github/workflows/wheels.yml (37 changes)
@@ -76,8 +76,20 @@ jobs:
             # Use C-Extension for SQLAlchemy
             echo "REQUIRE_SQLALCHEMY_CEXT=1"

+            # Add additional pip wheel build constraints
+            echo "PIP_CONSTRAINT=build_constraints.txt"
           ) > .env_file

+      - name: Write pip wheel build constraints
+        run: |
+          (
+            # ninja 1.11.1.2 + 1.11.1.3 seem to be broken on at least armhf
+            # this caused the numpy builds to fail
+            # https://github.com/scikit-build/ninja-python-distributions/issues/274
+            echo "ninja==1.11.1.1"
+          ) > build_constraints.txt
+
       - name: Upload env_file
         uses: actions/upload-artifact@v4.5.0
         with:
@@ -86,6 +98,13 @@ jobs:
           include-hidden-files: true
           overwrite: true

+      - name: Upload build_constraints
+        uses: actions/upload-artifact@v4.5.0
+        with:
+          name: build_constraints
+          path: ./build_constraints.txt
+          overwrite: true
+
       - name: Upload requirements_diff
         uses: actions/upload-artifact@v4.5.0
         with:
@@ -123,6 +142,11 @@ jobs:
         with:
           name: env_file

+      - name: Download build_constraints
+        uses: actions/download-artifact@v4.1.8
+        with:
+          name: build_constraints
+
       - name: Download requirements_diff
         uses: actions/download-artifact@v4.1.8
         with:
@@ -142,7 +166,7 @@ jobs:
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
           env-file: true
-          apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
+          apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-ng-dev"
           skip-binary: aiohttp;multidict;propcache;yarl;SQLAlchemy
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
@@ -167,6 +191,11 @@ jobs:
         with:
           name: env_file

+      - name: Download build_constraints
+        uses: actions/download-artifact@v4.1.8
+        with:
+          name: build_constraints
+
       - name: Download requirements_diff
         uses: actions/download-artifact@v4.1.8
         with:
@@ -205,7 +234,7 @@ jobs:
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
           env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
           skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
@@ -219,7 +248,7 @@ jobs:
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
           env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
           skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
@@ -233,7 +262,7 @@ jobs:
           arch: ${{ matrix.arch }}
           wheels-key: ${{ secrets.WHEELS_KEY }}
           env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
           skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
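The constraints file written above reaches pip through the PIP_CONSTRAINT environment variable, which pip reads like a --constraint flag. A minimal standalone sketch of that mechanism (not part of the workflow; numpy is just an example package and the paths are illustrative):

import os
import pathlib
import subprocess
import sys

# Write the same pin the workflow uses, then point pip at it.
constraints = pathlib.Path("build_constraints.txt")
constraints.write_text("ninja==1.11.1.1\n")  # known-good ninja per the diff

subprocess.run(
    [sys.executable, "-m", "pip", "install", "numpy"],
    env=os.environ | {"PIP_CONSTRAINT": str(constraints.resolve())},
    check=True,
)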
.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.8.3
+    rev: v0.8.6
     hooks:
       - id: ruff
         args:
.strict-typing
@@ -291,6 +291,7 @@ homeassistant.components.lcn.*
 homeassistant.components.ld2410_ble.*
 homeassistant.components.led_ble.*
 homeassistant.components.lektrico.*
+homeassistant.components.letpot.*
 homeassistant.components.lidarr.*
 homeassistant.components.lifx.*
 homeassistant.components.light.*
@@ -311,6 +312,7 @@ homeassistant.components.manual.*
 homeassistant.components.mastodon.*
 homeassistant.components.matrix.*
 homeassistant.components.matter.*
+homeassistant.components.mcp_server.*
 homeassistant.components.mealie.*
 homeassistant.components.media_extractor.*
 homeassistant.components.media_player.*
@@ -362,7 +364,9 @@ homeassistant.components.openuv.*
 homeassistant.components.oralb.*
 homeassistant.components.otbr.*
 homeassistant.components.overkiz.*
+homeassistant.components.overseerr.*
 homeassistant.components.p1_monitor.*
 homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
+homeassistant.components.peblar.*
 homeassistant.components.peco.*
@@ -380,6 +384,7 @@ homeassistant.components.pure_energie.*
 homeassistant.components.purpleair.*
 homeassistant.components.pushbullet.*
 homeassistant.components.pvoutput.*
+homeassistant.components.python_script.*
 homeassistant.components.qnap_qsw.*
 homeassistant.components.rabbitair.*
 homeassistant.components.radarr.*
CODEOWNERS (26 changes)
@@ -637,6 +637,8 @@ build.json @home-assistant/supervisor
 /tests/components/homeassistant_sky_connect/ @home-assistant/core
 /homeassistant/components/homeassistant_yellow/ @home-assistant/core
 /tests/components/homeassistant_yellow/ @home-assistant/core
+/homeassistant/components/homee/ @Taraman17
+/tests/components/homee/ @Taraman17
 /homeassistant/components/homekit/ @bdraco
 /tests/components/homekit/ @bdraco
 /homeassistant/components/homekit_controller/ @Jc2k @bdraco
@@ -686,6 +688,8 @@ build.json @home-assistant/supervisor
 /tests/components/icloud/ @Quentame @nzapponi
 /homeassistant/components/idasen_desk/ @abmantis
 /tests/components/idasen_desk/ @abmantis
+/homeassistant/components/igloohome/ @keithle888
+/tests/components/igloohome/ @keithle888
 /homeassistant/components/ign_sismologia/ @exxamalte
 /tests/components/ign_sismologia/ @exxamalte
 /homeassistant/components/image/ @home-assistant/core
@@ -827,6 +831,8 @@ build.json @home-assistant/supervisor
 /tests/components/led_ble/ @bdraco
 /homeassistant/components/lektrico/ @lektrico
 /tests/components/lektrico/ @lektrico
+/homeassistant/components/letpot/ @jpelgrom
+/tests/components/letpot/ @jpelgrom
 /homeassistant/components/lg_netcast/ @Drafteed @splinter98
 /tests/components/lg_netcast/ @Drafteed @splinter98
 /homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
@@ -887,6 +893,8 @@ build.json @home-assistant/supervisor
 /tests/components/matrix/ @PaarthShah
 /homeassistant/components/matter/ @home-assistant/matter
 /tests/components/matter/ @home-assistant/matter
+/homeassistant/components/mcp_server/ @allenporter
+/tests/components/mcp_server/ @allenporter
 /homeassistant/components/mealie/ @joostlek @andrew-codechimp
 /tests/components/mealie/ @joostlek @andrew-codechimp
 /homeassistant/components/meater/ @Sotolotl @emontnemery
@@ -1103,8 +1111,10 @@ build.json @home-assistant/supervisor
 /tests/components/otbr/ @home-assistant/core
 /homeassistant/components/ourgroceries/ @OnFreund
 /tests/components/ourgroceries/ @OnFreund
-/homeassistant/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
-/tests/components/overkiz/ @imicknl @vlebourl @tetienne @nyroDev @tronix117 @alexfp14
+/homeassistant/components/overkiz/ @imicknl
+/tests/components/overkiz/ @imicknl
+/homeassistant/components/overseerr/ @joostlek
+/tests/components/overseerr/ @joostlek
 /homeassistant/components/ovo_energy/ @timmo001
 /tests/components/ovo_energy/ @timmo001
 /homeassistant/components/p1_monitor/ @klaasnicolaas
@@ -1135,8 +1145,8 @@ build.json @home-assistant/supervisor
 /tests/components/plaato/ @JohNan
 /homeassistant/components/plex/ @jjlawren
 /tests/components/plex/ @jjlawren
-/homeassistant/components/plugwise/ @CoMPaTech @bouwew @frenck
-/tests/components/plugwise/ @CoMPaTech @bouwew @frenck
+/homeassistant/components/plugwise/ @CoMPaTech @bouwew
+/tests/components/plugwise/ @CoMPaTech @bouwew
 /homeassistant/components/plum_lightpad/ @ColinHarrington @prystupa
 /tests/components/plum_lightpad/ @ColinHarrington @prystupa
 /homeassistant/components/point/ @fredrike
@@ -1478,8 +1488,8 @@ build.json @home-assistant/supervisor
 /tests/components/system_bridge/ @timmo001
 /homeassistant/components/systemmonitor/ @gjohansson-ST
 /tests/components/systemmonitor/ @gjohansson-ST
-/homeassistant/components/tado/ @chiefdragon @erwindouna
-/tests/components/tado/ @chiefdragon @erwindouna
+/homeassistant/components/tado/ @erwindouna
+/tests/components/tado/ @erwindouna
 /homeassistant/components/tag/ @balloob @dmulcahey
 /tests/components/tag/ @balloob @dmulcahey
 /homeassistant/components/tailscale/ @frenck
@@ -1573,8 +1583,8 @@ build.json @home-assistant/supervisor
 /tests/components/triggercmd/ @rvmey
 /homeassistant/components/tts/ @home-assistant/core
 /tests/components/tts/ @home-assistant/core
-/homeassistant/components/tuya/ @Tuya @zlinoliver @frenck
-/tests/components/tuya/ @Tuya @zlinoliver @frenck
+/homeassistant/components/tuya/ @Tuya @zlinoliver
+/tests/components/tuya/ @Tuya @zlinoliver
 /homeassistant/components/twentemilieu/ @frenck
 /tests/components/twentemilieu/ @frenck
 /homeassistant/components/twinkly/ @dr1rrb @Robbie1221 @Olen
homeassistant/bootstrap.py
@@ -89,7 +89,7 @@ from .helpers import (
 )
 from .helpers.dispatcher import async_dispatcher_send_internal
 from .helpers.storage import get_internal_store_manager
-from .helpers.system_info import async_get_system_info, is_official_image
+from .helpers.system_info import async_get_system_info
 from .helpers.typing import ConfigType
 from .setup import (
     # _setup_started is marked as protected to make it clear
@@ -106,6 +106,7 @@ from .util.async_ import create_eager_task
 from .util.hass_dict import HassKey
 from .util.logging import async_activate_log_queue_handler
 from .util.package import async_get_user_site, is_docker_env, is_virtual_env
+from .util.system_info import is_official_image

 with contextlib.suppress(ImportError):
     # Ensure anyio backend is imported to avoid it being imported in the event loop
homeassistant/brands/microsoft.json
@@ -2,6 +2,7 @@
   "domain": "microsoft",
   "name": "Microsoft",
   "integrations": [
+    "azure_data_explorer",
     "azure_devops",
     "azure_event_hub",
     "azure_service_bus",
homeassistant/components/abode/strings.json
@@ -34,17 +34,17 @@
   "services": {
     "capture_image": {
       "name": "Capture image",
-      "description": "Request a new image capture from a camera device.",
+      "description": "Requests a new image capture from a camera device.",
       "fields": {
         "entity_id": {
           "name": "Entity",
-          "description": "Entity id of the camera to request an image."
+          "description": "Entity ID of the camera to request an image from."
         }
       }
     },
     "change_setting": {
       "name": "Change setting",
-      "description": "Change an Abode system setting.",
+      "description": "Changes an Abode system setting.",
       "fields": {
         "setting": {
           "name": "Setting",
@@ -58,11 +58,11 @@
     },
     "trigger_automation": {
       "name": "Trigger automation",
-      "description": "Trigger an Abode automation.",
+      "description": "Triggers an Abode automation.",
       "fields": {
         "entity_id": {
           "name": "Entity",
-          "description": "Entity id of the automation to trigger."
+          "description": "Entity ID of the automation to trigger."
         }
       }
     }
homeassistant/components/acaia/manifest.json
@@ -26,5 +26,5 @@
   "iot_class": "local_push",
   "loggers": ["aioacaia"],
   "quality_scale": "platinum",
-  "requirements": ["aioacaia==0.1.11"]
+  "requirements": ["aioacaia==0.1.13"]
 }
homeassistant/components/airthings/sensor.py
@@ -39,45 +39,54 @@ SENSORS: dict[str, SensorEntityDescription] = {
         key="temp",
         device_class=SensorDeviceClass.TEMPERATURE,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "humidity": SensorEntityDescription(
         key="humidity",
         device_class=SensorDeviceClass.HUMIDITY,
         native_unit_of_measurement=PERCENTAGE,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "pressure": SensorEntityDescription(
         key="pressure",
         device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
         native_unit_of_measurement=UnitOfPressure.MBAR,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "battery": SensorEntityDescription(
         key="battery",
         device_class=SensorDeviceClass.BATTERY,
         native_unit_of_measurement=PERCENTAGE,
         entity_category=EntityCategory.DIAGNOSTIC,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "co2": SensorEntityDescription(
         key="co2",
         device_class=SensorDeviceClass.CO2,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "voc": SensorEntityDescription(
         key="voc",
         device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "light": SensorEntityDescription(
         key="light",
         native_unit_of_measurement=PERCENTAGE,
         translation_key="light",
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "virusRisk": SensorEntityDescription(
         key="virusRisk",
         translation_key="virus_risk",
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "mold": SensorEntityDescription(
         key="mold",
         translation_key="mold",
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "rssi": SensorEntityDescription(
         key="rssi",
@@ -85,16 +94,19 @@ SENSORS: dict[str, SensorEntityDescription] = {
         device_class=SensorDeviceClass.SIGNAL_STRENGTH,
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.DIAGNOSTIC,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "pm1": SensorEntityDescription(
         key="pm1",
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         device_class=SensorDeviceClass.PM1,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     "pm25": SensorEntityDescription(
         key="pm25",
         native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
         device_class=SensorDeviceClass.PM25,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
 }
homeassistant/components/airthings_ble/sensor.py
@@ -67,18 +67,21 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
         device_class=SensorDeviceClass.TEMPERATURE,
         native_unit_of_measurement=UnitOfTemperature.CELSIUS,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=1,
     ),
     "humidity": SensorEntityDescription(
         key="humidity",
         device_class=SensorDeviceClass.HUMIDITY,
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=1,
     ),
     "pressure": SensorEntityDescription(
         key="pressure",
         device_class=SensorDeviceClass.ATMOSPHERIC_PRESSURE,
         native_unit_of_measurement=UnitOfPressure.MBAR,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=1,
     ),
     "battery": SensorEntityDescription(
         key="battery",
@@ -86,24 +89,28 @@ SENSORS_MAPPING_TEMPLATE: dict[str, SensorEntityDescription] = {
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
         entity_category=EntityCategory.DIAGNOSTIC,
+        suggested_display_precision=0,
     ),
     "co2": SensorEntityDescription(
         key="co2",
         device_class=SensorDeviceClass.CO2,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "voc": SensorEntityDescription(
         key="voc",
         device_class=SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS_PARTS,
         native_unit_of_measurement=CONCENTRATION_PARTS_PER_BILLION,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
     "illuminance": SensorEntityDescription(
         key="illuminance",
         translation_key="illuminance",
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
     ),
 }
homeassistant/components/analytics_insights/manifest.json
@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["python_homeassistant_analytics"],
-  "requirements": ["python-homeassistant-analytics==0.8.0"],
+  "requirements": ["python-homeassistant-analytics==0.8.1"],
   "single_config_entry": true
 }
homeassistant/components/androidtv/strings.json
@@ -21,7 +21,7 @@
       },
       "abort": {
         "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
-        "invalid_unique_id": "Impossible to determine a valid unique id for the device"
+        "invalid_unique_id": "Impossible to determine a valid unique ID for the device"
       }
     },
     "options": {
@@ -38,17 +38,17 @@
         }
       },
       "apps": {
-        "title": "Configure Android Apps",
-        "description": "Configure application id {app_id}",
+        "title": "Configure Android apps",
+        "description": "Configure application ID {app_id}",
         "data": {
-          "app_name": "Application Name",
+          "app_name": "Application name",
           "app_id": "Application ID",
           "app_delete": "Check to delete this application"
         }
       },
       "rules": {
         "title": "Configure Android state detection rules",
-        "description": "Configure detection rule for application id {rule_id}",
+        "description": "Configure detection rule for application ID {rule_id}",
         "data": {
           "rule_id": "[%key:component::androidtv::options::step::apps::data::app_id%]",
           "rule_values": "List of state detection rules (see documentation)",
homeassistant/components/androidtv_remote/config_flow.py
@@ -156,7 +156,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
         # and one of them, which could end up being in discovery_info.host, is from a
         # different device. If any of the discovery_info.ip_addresses matches the
         # existing host, don't update the host.
-        if existing_config_entry and len(discovery_info.ip_addresses) > 1:
+        if (
+            existing_config_entry
+            # Ignored entries don't have host
+            and CONF_HOST in existing_config_entry.data
+            and len(discovery_info.ip_addresses) > 1
+        ):
             existing_host = existing_config_entry.data[CONF_HOST]
             if existing_host != self.host:
                 if existing_host in [
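A simplified, hedged restatement of the guard above using plain dicts (the real flow operates on ConfigEntry objects): ignored entries store no CONF_HOST, so the membership check must come before the lookup to avoid a KeyError.

CONF_HOST = "host"

def should_keep_existing_host(entry_data: dict, ip_addresses: list[str]) -> bool:
    """Return True when discovery must not overwrite the stored host."""
    return (
        CONF_HOST in entry_data  # ignored entries don't have a host
        and len(ip_addresses) > 1
        and entry_data[CONF_HOST] in ip_addresses
    )

assert should_keep_existing_host({"host": "10.0.0.5"}, ["10.0.0.5", "10.0.0.9"])
assert not should_keep_existing_host({}, ["10.0.0.5", "10.0.0.9"])  # ignored entry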
homeassistant/components/androidtv_remote/strings.json
@@ -44,12 +44,12 @@
         }
       },
       "apps": {
-        "title": "Configure Android Apps",
-        "description": "Configure application id {app_id}",
+        "title": "Configure Android apps",
+        "description": "Configure application ID {app_id}",
         "data": {
-          "app_name": "Application Name",
+          "app_name": "Application name",
           "app_id": "Application ID",
-          "app_icon": "Application Icon",
+          "app_icon": "Application icon",
           "app_delete": "Check to delete this application"
         }
       }
homeassistant/components/apple_tv/config_flow.py
@@ -98,7 +98,6 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):

     VERSION = 1

     scan_filter: str | None = None
-    all_identifiers: set[str]
     atv: BaseConfig | None = None
     atv_identifiers: list[str] | None = None
     _host: str  # host in zeroconf discovery info, should not be accessed by other flows
@@ -118,6 +117,7 @@ class AppleTVConfigFlow(ConfigFlow, domain=DOMAIN):
     def __init__(self) -> None:
         """Initialize a new AppleTVConfigFlow."""
         self.credentials: dict[int, str | None] = {}  # Protocol -> credentials
+        self.all_identifiers: set[str] = set()

     @property
     def device_identifier(self) -> str | None:
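A hedged, standalone illustration of the change above: a bare class-level annotation only declares the attribute, so reading it before any assignment raises AttributeError, while initializing in __init__ guarantees every flow instance starts with its own empty set.

class Declared:
    identifiers: set[str]  # annotation only, no value is ever assigned

class Initialized:
    def __init__(self) -> None:
        self.identifiers: set[str] = set()  # always present, per instance

try:
    Declared().identifiers
except AttributeError:
    print("declared but never initialized")

assert Initialized().identifiers == set()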
homeassistant/components/apprise/manifest.json
@@ -6,5 +6,5 @@
   "iot_class": "cloud_push",
   "loggers": ["apprise"],
   "quality_scale": "legacy",
-  "requirements": ["apprise==1.9.0"]
+  "requirements": ["apprise==1.9.1"]
 }
homeassistant/components/aprilaire/coordinator.py
@@ -120,6 +120,8 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
         """Wait for the client to be ready."""

         if not self.data or Attribute.MAC_ADDRESS not in self.data:
+            await self.client.read_mac_address()
+
             data = await self.client.wait_for_response(
                 FunctionalDomain.IDENTIFICATION, 2, WAIT_TIMEOUT
             )
@@ -130,12 +132,9 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):

                 return False

-        if not self.data or Attribute.NAME not in self.data:
-            await self.client.wait_for_response(
-                FunctionalDomain.IDENTIFICATION, 4, WAIT_TIMEOUT
-            )
-
         if not self.data or Attribute.THERMOSTAT_MODES not in self.data:
+            await self.client.read_thermostat_iaq_available()
+
             await self.client.wait_for_response(
                 FunctionalDomain.CONTROL, 7, WAIT_TIMEOUT
             )
@@ -144,10 +143,16 @@ class AprilaireCoordinator(BaseDataUpdateCoordinatorProtocol):
             not self.data
             or Attribute.INDOOR_TEMPERATURE_CONTROLLING_SENSOR_STATUS not in self.data
         ):
+            await self.client.read_sensors()
+
             await self.client.wait_for_response(
                 FunctionalDomain.SENSORS, 2, WAIT_TIMEOUT
             )

+        await self.client.read_thermostat_status()
+
+        await self.client.read_iaq_status()
+
         await ready_callback(True)

         return True
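A minimal sketch of the pattern this diff leans on — issue an explicit read only when the cached data is missing an attribute, then wait for the matching response. The client here is a hypothetical stand-in, not the pyaprilaire API:

import asyncio

async def ensure_attribute(data: dict, attribute: str, read, wait) -> None:
    """Request `attribute` from the device unless it is already cached."""
    if not data or attribute not in data:
        await read()  # explicitly ask the device for the value
        await wait()  # block until the response arrives (with a timeout in real code)

async def main() -> None:
    data: dict = {}
    async def read(): data["mac"] = "aa:bb:cc:dd:ee:ff"
    async def wait(): await asyncio.sleep(0)
    await ensure_attribute(data, "mac", read, wait)
    assert data["mac"]

asyncio.run(main())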
homeassistant/components/aprilaire/manifest.json
@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["pyaprilaire"],
-  "requirements": ["pyaprilaire==0.7.4"]
+  "requirements": ["pyaprilaire==0.7.7"]
 }
homeassistant/components/apsystems/coordinator.py
@@ -29,6 +29,8 @@ class ApSystemsSensorData:
 class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
     """Coordinator used for all sensors."""

+    device_version: str
+
     def __init__(self, hass: HomeAssistant, api: APsystemsEZ1M) -> None:
         """Initialize my coordinator."""
         super().__init__(
@@ -46,6 +48,7 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]):
             raise UpdateFailed from None
         self.api.max_power = device_info.maxPower
         self.api.min_power = device_info.minPower
+        self.device_version = device_info.devVer

     async def _async_update_data(self) -> ApSystemsSensorData:
         try:
homeassistant/components/apsystems/entity.py
@@ -21,7 +21,8 @@ class ApSystemsEntity(Entity):
         """Initialize the APsystems entity."""
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, data.device_id)},
-            serial_number=data.device_id,
             manufacturer="APsystems",
             model="EZ1-M",
+            serial_number=data.device_id,
+            sw_version=data.coordinator.device_version.split(" ")[1],
         )
homeassistant/components/apsystems/number.py
@@ -2,6 +2,8 @@

 from __future__ import annotations

+from aiohttp import ClientConnectorError
+
 from homeassistant.components.number import NumberDeviceClass, NumberEntity, NumberMode
 from homeassistant.const import UnitOfPower
 from homeassistant.core import HomeAssistant
@@ -45,7 +47,13 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity):

     async def async_update(self) -> None:
         """Set the state with the value fetched from the inverter."""
-        self._attr_native_value = await self._api.get_max_power()
+        try:
+            status = await self._api.get_max_power()
+        except (TimeoutError, ClientConnectorError):
+            self._attr_available = False
+        else:
+            self._attr_available = True
+            self._attr_native_value = status

     async def async_set_native_value(self, value: float) -> None:
         """Set the desired output power."""
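A hedged sketch of the availability pattern introduced above, with a stub API in place of APsystemsEZ1M and ConnectionError standing in for aiohttp's ClientConnectorError: network failures mark the entity unavailable instead of propagating, and a later successful poll restores it.

import asyncio

class MaxOutputNumber:
    def __init__(self, api) -> None:
        self._api = api
        self.available = True
        self.native_value: float | None = None

    async def async_update(self) -> None:
        try:
            status = await self._api.get_max_power()
        except (TimeoutError, ConnectionError):  # stand-in for ClientConnectorError
            self.available = False
        else:
            self.available = True
            self.native_value = status

class StubApi:
    async def get_max_power(self) -> float:
        return 600.0

async def main() -> None:
    entity = MaxOutputNumber(StubApi())
    await entity.async_update()
    assert entity.available and entity.native_value == 600.0

asyncio.run(main())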
homeassistant/components/aranet/manifest.json
@@ -19,5 +19,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aranet",
   "integration_type": "device",
   "iot_class": "local_push",
-  "requirements": ["aranet4==2.4.0"]
+  "requirements": ["aranet4==2.5.0"]
 }
homeassistant/components/aranet/sensor.py
@@ -22,6 +22,7 @@ from homeassistant.components.sensor import (
 )
 from homeassistant.const import (
+    ATTR_MANUFACTURER,
     ATTR_MODEL,
     ATTR_NAME,
     ATTR_SW_VERSION,
     CONCENTRATION_PARTS_PER_MILLION,
@@ -142,6 +143,7 @@ def _sensor_device_info_to_hass(
     if adv.readings and adv.readings.name:
         hass_device_info[ATTR_NAME] = adv.readings.name
+    hass_device_info[ATTR_MANUFACTURER] = ARANET_MANUFACTURER_NAME
     hass_device_info[ATTR_MODEL] = adv.readings.type.model
     if adv.manufacturer_data:
         hass_device_info[ATTR_SW_VERSION] = str(adv.manufacturer_data.version)
     return hass_device_info
homeassistant/components/aruba/device_tracker.py
@@ -90,7 +90,7 @@ class ArubaDeviceScanner(DeviceScanner):
         """Retrieve data from Aruba Access Point and return parsed result."""

         connect = f"ssh {self.username}@{self.host} -o HostKeyAlgorithms=ssh-rsa"
-        ssh = pexpect.spawn(connect)
+        ssh: pexpect.spawn[str] = pexpect.spawn(connect, encoding="utf-8")
         query = ssh.expect(
             [
                 "password:",
@@ -125,12 +125,12 @@ class ArubaDeviceScanner(DeviceScanner):
         ssh.expect("#")
         ssh.sendline("show clients")
         ssh.expect("#")
-        devices_result = ssh.before.split(b"\r\n")
+        devices_result = (ssh.before or "").splitlines()
         ssh.sendline("exit")

         devices: dict[str, dict[str, str]] = {}
         for device in devices_result:
-            if match := _DEVICES_REGEX.search(device.decode("utf-8")):
+            if match := _DEVICES_REGEX.search(device):
                 devices[match.group("ip")] = {
                     "ip": match.group("ip"),
                     "mac": match.group("mac").upper(),
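A hedged sketch of the pexpect change: passing encoding="utf-8" puts spawn into text mode, so ssh.before is a str and the per-line .decode() calls and the b"\r\n" split are no longer needed. Requires pexpect on a POSIX system; a local shell stands in for a real Aruba AP here.

import pexpect

child = pexpect.spawn("/bin/sh", ["-c", "echo hello; echo world"], encoding="utf-8")
child.expect(pexpect.EOF)
lines = (child.before or "").splitlines()  # str lines, no decode step
assert "hello" in lines and "world" in lines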
homeassistant/components/aruba/manifest.json
@@ -6,5 +6,5 @@
   "iot_class": "local_polling",
   "loggers": ["pexpect", "ptyprocess"],
   "quality_scale": "legacy",
-  "requirements": ["pexpect==4.6.0"]
+  "requirements": ["pexpect==4.9.0"]
 }
homeassistant/components/assist_pipeline/__init__.py
@@ -108,6 +108,7 @@ async def async_pipeline_from_audio_stream(
     device_id: str | None = None,
     start_stage: PipelineStage = PipelineStage.STT,
     end_stage: PipelineStage = PipelineStage.TTS,
+    conversation_extra_system_prompt: str | None = None,
 ) -> None:
     """Create an audio pipeline from an audio stream.

@@ -119,6 +120,7 @@ async def async_pipeline_from_audio_stream(
         stt_metadata=stt_metadata,
         stt_stream=stt_stream,
         wake_word_phrase=wake_word_phrase,
+        conversation_extra_system_prompt=conversation_extra_system_prompt,
         run=PipelineRun(
             hass,
             context=context,

homeassistant/components/assist_pipeline/pipeline.py
@@ -1010,7 +1010,11 @@ class PipelineRun:
         self.intent_agent = agent_info.id

     async def recognize_intent(
-        self, intent_input: str, conversation_id: str | None, device_id: str | None
+        self,
+        intent_input: str,
+        conversation_id: str | None,
+        device_id: str | None,
+        conversation_extra_system_prompt: str | None,
     ) -> str:
         """Run intent recognition portion of pipeline. Returns text to speak."""
         if self.intent_agent is None:
@@ -1045,6 +1049,7 @@ class PipelineRun:
             device_id=device_id,
             language=input_language,
             agent_id=self.intent_agent,
+            extra_system_prompt=conversation_extra_system_prompt,
         )
         processed_locally = self.intent_agent == conversation.HOME_ASSISTANT_AGENT

@@ -1392,8 +1397,13 @@ class PipelineInput:
     """Input for text-to-speech. Required when start_stage = tts."""

     conversation_id: str | None = None
+    """Identifier for the conversation."""
+
+    conversation_extra_system_prompt: str | None = None
+    """Extra prompt information for the conversation agent."""
+
     device_id: str | None = None
     """Identifier of the device that is processing the input/output of the pipeline."""

     async def execute(self) -> None:
         """Run pipeline."""
@@ -1483,6 +1493,7 @@ class PipelineInput:
             intent_input,
             self.conversation_id,
             self.device_id,
+            self.conversation_extra_system_prompt,
         )
         if tts_input.strip():
             current_stage = PipelineStage.TTS
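A hedged sketch (plain dataclasses, not the real pipeline types) of the new conversation_extra_system_prompt plumbing: the optional prompt rides unchanged from the public entry point through PipelineInput into the intent-recognition call.

from dataclasses import dataclass

@dataclass
class PipelineInputSketch:
    intent_input: str
    conversation_extra_system_prompt: str | None = None

def recognize_intent(intent_input: str, extra_system_prompt: str | None) -> str:
    # A real agent would merge the extra prompt into its system prompt.
    prompt = extra_system_prompt or "default system prompt"
    return f"agent({intent_input!r}, system={prompt!r})"

pipeline_input = PipelineInputSketch("turn on the lights", "the user is in the kitchen")
print(
    recognize_intent(
        pipeline_input.intent_input,
        pipeline_input.conversation_extra_system_prompt,
    )
)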
homeassistant/components/assist_pipeline/vad.py
@@ -75,7 +75,7 @@ class AudioBuffer:
 class VoiceCommandSegmenter:
     """Segments an audio stream into voice commands."""

-    speech_seconds: float = 0.1
+    speech_seconds: float = 0.3
     """Seconds of speech before voice command has started."""

     command_seconds: float = 1.0
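A hedged sketch of what raising speech_seconds from 0.1 to 0.3 does: with 10 ms audio chunks, segmentation now requires 30 consecutive speech chunks instead of 10 before a command starts, filtering out short noise bursts. The chunk size and loop are illustrative, not the real segmenter.

SECONDS_PER_CHUNK = 0.01  # assumed 10 ms chunks
SPEECH_SECONDS = 0.3

def command_started(chunks_are_speech: list[bool]) -> bool:
    """Return True once enough uninterrupted speech has been heard."""
    run = 0.0
    for is_speech in chunks_are_speech:
        run = run + SECONDS_PER_CHUNK if is_speech else 0.0
        if run >= SPEECH_SECONDS:
            return True
    return False

assert not command_started([True] * 10)  # 0.1 s burst: ignored now
assert command_started([True] * 30)      # 0.3 s of speech: command begins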
homeassistant/components/asuswrt/strings.json
@@ -31,8 +31,8 @@
         "unknown": "[%key:common::config_flow::error::unknown%]"
       },
       "abort": {
-        "invalid_unique_id": "Impossible to determine a valid unique id for the device",
-        "no_unique_id": "A device without a valid unique id is already configured. Configuration of multiple instance is not possible"
+        "invalid_unique_id": "Impossible to determine a valid unique ID for the device",
+        "no_unique_id": "A device without a valid unique ID is already configured. Configuration of multiple instances is not possible"
       }
     },
     "options": {
@@ -42,7 +42,7 @@
         "consider_home": "Seconds to wait before considering a device away",
         "track_unknown": "Track unknown / unnamed devices",
         "interface": "The interface that you want statistics from (e.g. eth0, eth1 etc)",
-        "dnsmasq": "The location in the router of the dnsmasq.leases files",
+        "dnsmasq": "The location of the dnsmasq.leases file in the router",
         "require_ip": "Devices must have IP (for access point mode)"
       }
     }
homeassistant/components/aussie_broadband/manifest.json
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/aussie_broadband",
   "iot_class": "cloud_polling",
   "loggers": ["aussiebb"],
-  "requirements": ["pyaussiebb==0.1.4"]
+  "requirements": ["pyaussiebb==0.1.5"]
 }
homeassistant/components/backup/__init__.py
@@ -5,6 +5,10 @@ from homeassistant.helpers import config_validation as cv
 from homeassistant.helpers.hassio import is_hassio
 from homeassistant.helpers.typing import ConfigType

+# Pre-import backup to avoid it being imported
+# later when the import executor is busy and delaying
+# startup
+from . import backup  # noqa: F401
 from .agent import (
     BackupAgent,
     BackupAgentError,
@@ -17,8 +21,10 @@ from .manager import (
     BackupManager,
     BackupPlatformProtocol,
     BackupReaderWriter,
+    BackupReaderWriterError,
     CoreBackupReaderWriter,
     CreateBackupEvent,
     IncorrectPasswordError,
+    ManagerBackup,
     NewBackup,
     WrittenBackup,
@@ -35,8 +41,10 @@ __all__ = [
     "BackupAgentPlatformProtocol",
     "BackupPlatformProtocol",
     "BackupReaderWriter",
+    "BackupReaderWriterError",
+    "CreateBackupEvent",
     "Folder",
     "IncorrectPasswordError",
     "LocalBackupAgent",
     "NewBackup",
     "WrittenBackup",
homeassistant/components/backup/config.py
@@ -7,6 +7,7 @@ from collections.abc import Callable
 from dataclasses import dataclass, field, replace
 from datetime import datetime, timedelta
 from enum import StrEnum
+import random
 from typing import TYPE_CHECKING, Self, TypedDict

 from cronsim import CronSim
@@ -17,7 +18,7 @@ from homeassistant.helpers.typing import UNDEFINED, UndefinedType
 from homeassistant.util import dt as dt_util

 from .const import LOGGER
-from .models import Folder
+from .models import BackupManagerError, Folder

 if TYPE_CHECKING:
     from .manager import BackupManager, ManagerBackup
@@ -28,6 +29,10 @@ if TYPE_CHECKING:
 CRON_PATTERN_DAILY = "45 4 * * *"
 CRON_PATTERN_WEEKLY = "45 4 * * {}"

+# Randomize the start time of the backup by up to 60 minutes to avoid
+# all backups running at the same time.
+BACKUP_START_TIME_JITTER = 60 * 60
+

 class StoredBackupConfig(TypedDict):
     """Represent the stored backup config."""
@@ -124,6 +129,7 @@ class BackupConfig:
     def load(self, stored_config: StoredBackupConfig) -> None:
         """Load config."""
         self.data = BackupConfigData.from_dict(stored_config)
+        self.data.retention.apply(self._manager)
         self.data.schedule.apply(self._manager)

     async def update(
@@ -160,8 +166,13 @@ class RetentionConfig:
     def apply(self, manager: BackupManager) -> None:
         """Apply backup retention configuration."""
         if self.days is not None:
+            LOGGER.debug(
+                "Scheduling next automatic delete of backups older than %s in 1 day",
+                self.days,
+            )
             self._schedule_next(manager)
         else:
+            LOGGER.debug("Unscheduling next automatic delete")
             self._unschedule_next(manager)

     def to_dict(self) -> StoredRetentionConfig:
@@ -318,11 +329,13 @@ class BackupSchedule:
                     password=config_data.create_backup.password,
                     with_automatic_settings=True,
                 )
+            except BackupManagerError as err:
+                LOGGER.error("Error creating backup: %s", err)
             except Exception:  # noqa: BLE001
-                # another more specific exception will be added
-                # and handled in the future
                 LOGGER.exception("Unexpected error creating automatic backup")

+        next_time += timedelta(seconds=random.randint(0, BACKUP_START_TIME_JITTER))
+        LOGGER.debug("Scheduling next automatic backup at %s", next_time)
         manager.remove_next_backup_event = async_track_point_in_time(
             manager.hass, _create_backup, next_time
         )
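A hedged sketch of the jitter logic added above: each scheduled backup is pushed forward by a random 0-60 minutes so many instances sharing the 04:45 cron pattern don't all fire simultaneously.

from datetime import datetime, timedelta
import random

BACKUP_START_TIME_JITTER = 60 * 60  # seconds

next_time = datetime(2025, 1, 2, 4, 45)  # what the cron pattern would produce
next_time += timedelta(seconds=random.randint(0, BACKUP_START_TIME_JITTER))
assert datetime(2025, 1, 2, 4, 45) <= next_time <= datetime(2025, 1, 2, 5, 45)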
@@ -46,15 +46,11 @@ from .const import (
|
||||
EXCLUDE_FROM_BACKUP,
|
||||
LOGGER,
|
||||
)
|
||||
from .models import AgentBackup, Folder
|
||||
from .models import AgentBackup, BackupManagerError, Folder
|
||||
from .store import BackupStore
|
||||
from .util import make_backup_dir, read_backup, validate_password
|
||||
|
||||
|
||||
class IncorrectPasswordError(HomeAssistantError):
|
||||
"""Raised when the password is incorrect."""
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True, slots=True)
|
||||
class NewBackup:
|
||||
"""New backup class."""
|
||||
@@ -245,6 +241,14 @@ class BackupReaderWriter(abc.ABC):
|
||||
"""Restore a backup."""
|
||||
|
||||
|
||||
class BackupReaderWriterError(HomeAssistantError):
|
||||
"""Backup reader/writer error."""
|
||||
|
||||
|
||||
class IncorrectPasswordError(BackupReaderWriterError):
|
||||
"""Raised when the password is incorrect."""
|
||||
|
||||
|
||||
class BackupManager:
|
||||
"""Define the format that backup managers can have."""
|
||||
|
||||
@@ -373,7 +377,9 @@ class BackupManager:
|
||||
)
|
||||
for result in pre_backup_results:
|
||||
if isinstance(result, Exception):
|
||||
raise result
|
||||
raise BackupManagerError(
|
||||
f"Error during pre-backup: {result}"
|
||||
) from result
|
||||
|
||||
async def async_post_backup_actions(self) -> None:
|
||||
"""Perform post backup actions."""
|
||||
@@ -386,7 +392,9 @@ class BackupManager:
|
||||
)
|
||||
for result in post_backup_results:
|
||||
if isinstance(result, Exception):
|
||||
raise result
|
||||
raise BackupManagerError(
|
||||
f"Error during post-backup: {result}"
|
||||
) from result
|
||||
|
||||
async def load_platforms(self) -> None:
|
||||
"""Load backup platforms."""
|
||||
@@ -422,11 +430,22 @@ class BackupManager:
|
||||
return_exceptions=True,
|
||||
)
|
||||
for idx, result in enumerate(sync_backup_results):
|
||||
if isinstance(result, Exception):
|
||||
if isinstance(result, BackupReaderWriterError):
|
||||
# writer errors will affect all agents
|
||||
# no point in continuing
|
||||
raise BackupManagerError(str(result)) from result
|
||||
if isinstance(result, BackupAgentError):
|
||||
LOGGER.error("Error uploading to %s: %s", agent_ids[idx], result)
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
LOGGER.exception(
|
||||
"Error during backup upload - %s", result, exc_info=result
|
||||
)
|
||||
continue
|
||||
if isinstance(result, Exception):
|
||||
# trap bugs from agents
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
LOGGER.error("Unexpected error: %s", result, exc_info=result)
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result
|
||||
|
||||
return agent_errors
|
||||
|
||||
async def async_get_backups(
|
||||
@@ -449,7 +468,7 @@ class BackupManager:
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result
|
||||
raise result # unexpected error
|
||||
for agent_backup in result:
|
||||
if (backup_id := agent_backup.backup_id) not in backups:
|
||||
if known_backup := self.known_backups.get(backup_id):
|
||||
@@ -499,7 +518,7 @@ class BackupManager:
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result
|
||||
raise result # unexpected error
|
||||
if not result:
|
||||
continue
|
||||
if backup is None:
|
||||
@@ -563,7 +582,7 @@ class BackupManager:
|
||||
agent_errors[agent_ids[idx]] = result
|
||||
continue
|
||||
if isinstance(result, BaseException):
|
||||
raise result
|
||||
raise result # unexpected error
|
||||
|
||||
if not agent_errors:
|
||||
self.known_backups.remove(backup_id)
|
||||
@@ -578,7 +597,7 @@ class BackupManager:
|
||||
) -> None:
|
||||
"""Receive and store a backup file from upload."""
|
||||
if self.state is not BackupManagerState.IDLE:
|
||||
raise HomeAssistantError(f"Backup manager busy: {self.state}")
|
||||
raise BackupManagerError(f"Backup manager busy: {self.state}")
|
||||
self.async_on_backup_event(
|
||||
ReceiveBackupEvent(stage=None, state=ReceiveBackupState.IN_PROGRESS)
|
||||
)
|
||||
@@ -652,6 +671,7 @@ class BackupManager:
|
||||
include_homeassistant=include_homeassistant,
|
||||
name=name,
|
||||
password=password,
|
||||
raise_task_error=True,
|
||||
with_automatic_settings=with_automatic_settings,
|
||||
)
|
||||
assert self._backup_finish_task
|
||||
@@ -669,11 +689,12 @@ class BackupManager:
|
||||
include_homeassistant: bool,
|
||||
name: str | None,
|
||||
password: str | None,
|
||||
raise_task_error: bool = False,
|
||||
with_automatic_settings: bool = False,
|
||||
) -> NewBackup:
|
||||
"""Initiate generating a backup."""
|
||||
if self.state is not BackupManagerState.IDLE:
|
||||
raise HomeAssistantError(f"Backup manager busy: {self.state}")
|
||||
raise BackupManagerError(f"Backup manager busy: {self.state}")
|
||||
|
||||
if with_automatic_settings:
|
||||
self.config.data.last_attempted_automatic_backup = dt_util.now()
|
||||
@@ -692,6 +713,7 @@ class BackupManager:
|
||||
include_homeassistant=include_homeassistant,
|
||||
name=name,
|
||||
password=password,
|
||||
raise_task_error=raise_task_error,
|
||||
with_automatic_settings=with_automatic_settings,
|
||||
)
|
||||
except Exception:
|
||||
@@ -714,57 +736,81 @@ class BackupManager:
|
||||
include_homeassistant: bool,
|
||||
name: str | None,
|
||||
password: str | None,
|
||||
raise_task_error: bool,
|
||||
with_automatic_settings: bool,
|
||||
) -> NewBackup:
|
||||
"""Initiate generating a backup."""
|
||||
if not agent_ids:
|
||||
raise HomeAssistantError("At least one agent must be selected")
|
||||
if any(agent_id not in self.backup_agents for agent_id in agent_ids):
|
||||
raise HomeAssistantError("Invalid agent selected")
|
||||
raise BackupManagerError("At least one agent must be selected")
|
||||
if invalid_agents := [
|
||||
agent_id for agent_id in agent_ids if agent_id not in self.backup_agents
|
||||
]:
|
||||
raise BackupManagerError(f"Invalid agents selected: {invalid_agents}")
|
||||
if include_all_addons and include_addons:
|
||||
raise HomeAssistantError(
|
||||
raise BackupManagerError(
|
||||
"Cannot include all addons and specify specific addons"
|
||||
)
|
||||
|
||||
backup_name = (
|
||||
name
|
||||
or f"{"Automatic" if with_automatic_settings else "Custom"} {HAVERSION}"
|
||||
or f"{"Automatic" if with_automatic_settings else "Custom"} backup {HAVERSION}"
|
||||
)
|
||||
new_backup, self._backup_task = await self._reader_writer.async_create_backup(
|
||||
agent_ids=agent_ids,
|
||||
backup_name=backup_name,
|
||||
extra_metadata={
|
||||
"instance_id": await instance_id.async_get(self.hass),
|
||||
"with_automatic_settings": with_automatic_settings,
|
||||
},
|
||||
include_addons=include_addons,
|
||||
include_all_addons=include_all_addons,
|
||||
include_database=include_database,
|
||||
include_folders=include_folders,
|
||||
include_homeassistant=include_homeassistant,
|
||||
on_progress=self.async_on_backup_event,
|
||||
password=password,
|
||||
)
|
||||
self._backup_finish_task = self.hass.async_create_task(
|
||||
|
||||
try:
|
||||
(
|
||||
new_backup,
|
||||
self._backup_task,
|
||||
) = await self._reader_writer.async_create_backup(
|
||||
agent_ids=agent_ids,
|
||||
backup_name=backup_name,
|
||||
extra_metadata={
|
||||
"instance_id": await instance_id.async_get(self.hass),
|
||||
"with_automatic_settings": with_automatic_settings,
|
||||
},
|
||||
include_addons=include_addons,
|
||||
include_all_addons=include_all_addons,
|
||||
include_database=include_database,
|
||||
include_folders=include_folders,
|
||||
include_homeassistant=include_homeassistant,
|
||||
on_progress=self.async_on_backup_event,
|
||||
password=password,
|
||||
)
|
||||
except BackupReaderWriterError as err:
|
||||
raise BackupManagerError(str(err)) from err
|
||||
|
||||
backup_finish_task = self._backup_finish_task = self.hass.async_create_task(
|
||||
self._async_finish_backup(agent_ids, with_automatic_settings),
|
||||
name="backup_manager_finish_backup",
|
||||
)
|
||||
if not raise_task_error:
|
||||
|
||||
def log_finish_task_error(task: asyncio.Task[None]) -> None:
|
||||
if task.done() and not task.cancelled() and (err := task.exception()):
|
||||
if isinstance(err, BackupManagerError):
|
||||
LOGGER.error("Error creating backup: %s", err)
|
||||
else:
|
||||
LOGGER.error("Unexpected error: %s", err, exc_info=err)
|
||||
|
||||
backup_finish_task.add_done_callback(log_finish_task_error)
|
||||
|
||||
return new_backup

async def _async_finish_backup(
self, agent_ids: list[str], with_automatic_settings: bool
) -> None:
"""Finish a backup."""
if TYPE_CHECKING:
assert self._backup_task is not None
backup_success = False
try:
written_backup = await self._backup_task
except Exception as err: # noqa: BLE001
LOGGER.debug("Generating backup failed", exc_info=err)
self.async_on_backup_event(
CreateBackupEvent(stage=None, state=CreateBackupState.FAILED)
)
except Exception as err:
if with_automatic_settings:
self._update_issue_backup_failed()

if isinstance(err, BackupReaderWriterError):
raise BackupManagerError(str(err)) from err
raise # unexpected error
else:
LOGGER.debug(
"Generated new backup with backup_id %s, uploading to agents %s",
@@ -777,28 +823,40 @@ class BackupManager:
state=CreateBackupState.IN_PROGRESS,
)
)
agent_errors = await self._async_upload_backup(
backup=written_backup.backup,
agent_ids=agent_ids,
open_stream=written_backup.open_stream,
)
await written_backup.release_stream()
if with_automatic_settings:
# create backup was successful, update last_completed_automatic_backup
self.config.data.last_completed_automatic_backup = dt_util.now()
self.store.save()
self._update_issue_after_agent_upload(agent_errors)
self.known_backups.add(written_backup.backup, agent_errors)

try:
agent_errors = await self._async_upload_backup(
backup=written_backup.backup,
agent_ids=agent_ids,
open_stream=written_backup.open_stream,
)
finally:
await written_backup.release_stream()
self.known_backups.add(written_backup.backup, agent_errors)
if not agent_errors:
if with_automatic_settings:
# create backup was successful, update last_completed_automatic_backup
self.config.data.last_completed_automatic_backup = dt_util.now()
self.store.save()
backup_success = True

if with_automatic_settings:
self._update_issue_after_agent_upload(agent_errors)
# delete old backups more numerous than copies
# try this regardless of agent errors above
await delete_backups_exceeding_configured_count(self)

self.async_on_backup_event(
CreateBackupEvent(stage=None, state=CreateBackupState.COMPLETED)
)
finally:
self._backup_task = None
self._backup_finish_task = None
self.async_on_backup_event(
CreateBackupEvent(
stage=None,
state=CreateBackupState.COMPLETED
if backup_success
else CreateBackupState.FAILED,
)
)
self.async_on_backup_event(IdleEvent())

async def async_restore_backup(
@@ -814,7 +872,7 @@ class BackupManager:
) -> None:
"""Initiate restoring a backup."""
if self.state is not BackupManagerState.IDLE:
raise HomeAssistantError(f"Backup manager busy: {self.state}")
raise BackupManagerError(f"Backup manager busy: {self.state}")

self.async_on_backup_event(
RestoreBackupEvent(stage=None, state=RestoreBackupState.IN_PROGRESS)
@@ -829,6 +887,9 @@ class BackupManager:
restore_folders=restore_folders,
restore_homeassistant=restore_homeassistant,
)
self.async_on_backup_event(
RestoreBackupEvent(stage=None, state=RestoreBackupState.COMPLETED)
)
except Exception:
self.async_on_backup_event(
RestoreBackupEvent(stage=None, state=RestoreBackupState.FAILED)
@@ -851,7 +912,7 @@ class BackupManager:
"""Initiate restoring a backup."""
agent = self.backup_agents[agent_id]
if not await agent.async_get_backup(backup_id):
raise HomeAssistantError(
raise BackupManagerError(
f"Backup {backup_id} not found in agent {agent_id}"
)

@@ -1024,11 +1085,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
backup_id = _generate_backup_id(date_str, backup_name)

if include_addons or include_all_addons or include_folders:
raise HomeAssistantError(
raise BackupReaderWriterError(
"Addons and folders are not supported by core backup"
)
if not include_homeassistant:
raise HomeAssistantError("Home Assistant must be included in backup")
raise BackupReaderWriterError("Home Assistant must be included in backup")

backup_task = self._hass.async_create_task(
self._async_create_backup(
@@ -1099,6 +1160,13 @@ class CoreBackupReaderWriter(BackupReaderWriter):
password,
local_agent_tar_file_path,
)
except (BackupManagerError, OSError, tarfile.TarError, ValueError) as err:
# BackupManagerError from async_pre_backup_actions
# OSError from file operations
# TarError from tarfile
# ValueError from json_bytes
raise BackupReaderWriterError(str(err)) from err
else:
backup = AgentBackup(
addons=[],
backup_id=backup_id,
@@ -1116,12 +1184,15 @@ class CoreBackupReaderWriter(BackupReaderWriter):
async_add_executor_job = self._hass.async_add_executor_job

async def send_backup() -> AsyncIterator[bytes]:
f = await async_add_executor_job(tar_file_path.open, "rb")
try:
while chunk := await async_add_executor_job(f.read, 2**20):
yield chunk
finally:
await async_add_executor_job(f.close)
f = await async_add_executor_job(tar_file_path.open, "rb")
try:
while chunk := await async_add_executor_job(f.read, 2**20):
yield chunk
finally:
await async_add_executor_job(f.close)
except OSError as err:
raise BackupReaderWriterError(str(err)) from err

async def open_backup() -> AsyncIterator[bytes]:
return send_backup()
@@ -1129,14 +1200,20 @@ class CoreBackupReaderWriter(BackupReaderWriter):
async def remove_backup() -> None:
if local_agent_tar_file_path:
return
await async_add_executor_job(tar_file_path.unlink, True)
try:
await async_add_executor_job(tar_file_path.unlink, True)
except OSError as err:
raise BackupReaderWriterError(str(err)) from err

return WrittenBackup(
backup=backup, open_stream=open_backup, release_stream=remove_backup
)
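Editor's note: send_backup above streams the tar file in 1 MiB chunks, pushing every blocking read onto an executor so the event loop stays responsive. A self-contained sketch of the same technique (illustrative path handling, not the HA code):

import asyncio
from collections.abc import AsyncIterator
from pathlib import Path

async def stream_file(path: Path) -> AsyncIterator[bytes]:
    """Yield 1 MiB chunks of a file without blocking the event loop."""
    loop = asyncio.get_running_loop()
    f = await loop.run_in_executor(None, path.open, "rb")
    try:
        while chunk := await loop.run_in_executor(None, f.read, 2**20):
            yield chunk
    finally:
        await loop.run_in_executor(None, f.close)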
finally:
# Inform integrations the backup is done
await manager.async_post_backup_actions()
try:
await manager.async_post_backup_actions()
except BackupManagerError as err:
raise BackupReaderWriterError(str(err)) from err

def _mkdir_and_generate_backup_contents(
self,
@@ -1206,6 +1283,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
if self._local_agent_id in agent_ids:
local_agent = manager.local_backup_agents[self._local_agent_id]
tar_file_path = local_agent.get_backup_path(backup.backup_id)
await async_add_executor_job(make_backup_dir, tar_file_path.parent)
await async_add_executor_job(shutil.move, temp_file, tar_file_path)
else:
tar_file_path = temp_file
@@ -1249,11 +1327,11 @@ class CoreBackupReaderWriter(BackupReaderWriter):
"""

if restore_addons or restore_folders:
raise HomeAssistantError(
raise BackupReaderWriterError(
"Addons and folders are not supported in core restore"
)
if not restore_homeassistant and not restore_database:
raise HomeAssistantError(
raise BackupReaderWriterError(
"Home Assistant or database must be included in restore"
)

@@ -1298,7 +1376,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
)

await self._hass.async_add_executor_job(_write_restore_file)
await self._hass.services.async_call("homeassistant", "restart", {})
await self._hass.services.async_call("homeassistant", "restart", blocking=True)


def _generate_backup_id(date: str, name: str) -> str:

@@ -6,6 +6,8 @@ from dataclasses import asdict, dataclass
from enum import StrEnum
from typing import Any, Self

from homeassistant.exceptions import HomeAssistantError


@dataclass(frozen=True, kw_only=True)
class AddonInfo:
@@ -67,3 +69,7 @@ class AgentBackup:
protected=data["protected"],
size=data["size"],
)


class BackupManagerError(HomeAssistantError):
"""Backup manager error."""

@@ -5,8 +5,8 @@
"description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
},
"automatic_backup_failed_upload_agents": {
"title": "Automatic backup could not be uploaded to agents",
"description": "The automatic backup could not be uploaded to agents {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
"title": "Automatic backup could not be uploaded to the configured locations",
"description": "The automatic backup could not be uploaded to the configured locations {failed_agents}. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."
}
},
"services": {

@@ -34,7 +34,7 @@ class BangOlufsenData:

type BangOlufsenConfigEntry = ConfigEntry[BangOlufsenData]

PLATFORMS = [Platform.MEDIA_PLAYER]
PLATFORMS = [Platform.EVENT, Platform.MEDIA_PLAYER]


async def async_setup_entry(hass: HomeAssistant, entry: BangOlufsenConfigEntry) -> bool:

@@ -79,6 +79,7 @@ class WebsocketNotification(StrEnum):
"""Enum for WebSocket notification types."""

ACTIVE_LISTENING_MODE = "active_listening_mode"
BUTTON = "button"
PLAYBACK_ERROR = "playback_error"
PLAYBACK_METADATA = "playback_metadata"
PLAYBACK_PROGRESS = "playback_progress"
@@ -203,14 +204,60 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
),
]
)
# Map for storing compatibility of devices.

MODEL_SUPPORT_DEVICE_BUTTONS: Final[str] = "device_buttons"

MODEL_SUPPORT_MAP = {
MODEL_SUPPORT_DEVICE_BUTTONS: (
BangOlufsenModel.BEOLAB_8,
BangOlufsenModel.BEOLAB_28,
BangOlufsenModel.BEOSOUND_2,
BangOlufsenModel.BEOSOUND_A5,
BangOlufsenModel.BEOSOUND_A9,
BangOlufsenModel.BEOSOUND_BALANCE,
BangOlufsenModel.BEOSOUND_EMERGE,
BangOlufsenModel.BEOSOUND_LEVEL,
BangOlufsenModel.BEOSOUND_THEATRE,
)
}

# Device events
BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event"

# Dict used to translate native Bang & Olufsen event names to string.json compatible ones
EVENT_TRANSLATION_MAP: dict[str, str] = {
"shortPress (Release)": "short_press_release",
"longPress (Timeout)": "long_press_timeout",
"longPress (Release)": "long_press_release",
"veryLongPress (Timeout)": "very_long_press_timeout",
"veryLongPress (Release)": "very_long_press_release",
}

CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS"

DEVICE_BUTTONS: Final[list[str]] = [
"Bluetooth",
"Microphone",
"Next",
"PlayPause",
"Preset1",
"Preset2",
"Preset3",
"Preset4",
"Previous",
"Volume",
]


DEVICE_BUTTON_EVENTS: Final[list[str]] = [
"short_press_release",
"long_press_timeout",
"long_press_release",
"very_long_press_timeout",
"very_long_press_release",
]
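Editor's note: EVENT_TRANSLATION_MAP converts raw Mozart notification states into the snake_case event types listed in DEVICE_BUTTON_EVENTS, which is what the strings.json keys further below rely on. A tiny illustrative check (constants abbreviated from the file above):

EVENT_TRANSLATION_MAP = {"shortPress (Release)": "short_press_release"}
DEVICE_BUTTON_EVENTS = ["short_press_release", "long_press_timeout"]

event_type = EVENT_TRANSLATION_MAP["shortPress (Release)"]
assert event_type in DEVICE_BUTTON_EVENTS  # translation keys must line up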

# Beolink Converter NL/ML sources need to be transformed to upper case
BEOLINK_JOIN_SOURCES_TO_UPPER = (
"aux_a",

homeassistant/components/bang_olufsen/event.py (new file, 76 lines)
@@ -0,0 +1,76 @@
"""Event entities for the Bang & Olufsen integration."""

from __future__ import annotations

from homeassistant.components.event import EventDeviceClass, EventEntity
from homeassistant.const import CONF_MODEL
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback

from . import BangOlufsenConfigEntry
from .const import (
CONNECTION_STATUS,
DEVICE_BUTTON_EVENTS,
DEVICE_BUTTONS,
MODEL_SUPPORT_DEVICE_BUTTONS,
MODEL_SUPPORT_MAP,
WebsocketNotification,
)
from .entity import BangOlufsenEntity


async def async_setup_entry(
hass: HomeAssistant,
config_entry: BangOlufsenConfigEntry,
async_add_entities: AddEntitiesCallback,
) -> None:
"""Set up Sensor entities from config entry."""
|
||||

if config_entry.data[CONF_MODEL] in MODEL_SUPPORT_MAP[MODEL_SUPPORT_DEVICE_BUTTONS]:
async_add_entities(
BangOlufsenButtonEvent(config_entry, button_type)
for button_type in DEVICE_BUTTONS
)


class BangOlufsenButtonEvent(BangOlufsenEntity, EventEntity):
"""Event class for Button events."""

_attr_device_class = EventDeviceClass.BUTTON
_attr_entity_registry_enabled_default = False
_attr_event_types = DEVICE_BUTTON_EVENTS

def __init__(self, config_entry: BangOlufsenConfigEntry, button_type: str) -> None:
"""Initialize Button."""
super().__init__(config_entry, config_entry.runtime_data.client)

self._attr_unique_id = f"{self._unique_id}_{button_type}"

# Make the native button name Home Assistant compatible
self._attr_translation_key = button_type.lower()

self._button_type = button_type

async def async_added_to_hass(self) -> None:
"""Listen to WebSocket button events."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{self._unique_id}_{CONNECTION_STATUS}",
self._async_update_connection_state,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
self._async_handle_event,
)
)

@callback
def _async_handle_event(self, event: str) -> None:
"""Handle event."""
self._trigger_event(event)
self.async_write_ha_state()
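Editor's note: the entity is purely push-based; the websocket layer dispatches and the entity fires. A hedged sketch of how a signal reaches it, mirroring the signal format in async_added_to_hass above ("button" being WebsocketNotification.BUTTON's value; the helper name is hypothetical):

from homeassistant.helpers.dispatcher import async_dispatcher_send

def fire_button_event(hass, unique_id: str, button_type: str) -> None:
    # Matches f"{unique_id}_{WebsocketNotification.BUTTON}_{button_type}" above.
    async_dispatcher_send(
        hass, f"{unique_id}_button_{button_type}", "short_press_release"
    )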
@@ -1,7 +1,12 @@
{
"common": {
"jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity.",
"jid_options_name": "JID options",
"jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity."
"long_press_release": "Release of long press",
"long_press_timeout": "Long press",
"short_press_release": "Release of short press",
"very_long_press_release": "Release of very long press",
"very_long_press_timeout": "Very long press"
},
"config": {
"error": {
@@ -29,6 +34,150 @@
}
}
},
"entity": {
"event": {
"bluetooth": {
"name": "Bluetooth",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"microphone": {
"name": "Microphone",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"next": {
"name": "Next",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"playpause": {
"name": "Play / Pause",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"preset1": {
"name": "Favourite 1",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"preset2": {
"name": "Favourite 2",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"preset3": {
"name": "Favourite 3",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"preset4": {
"name": "Favourite 4",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"previous": {
"name": "Previous",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
},
"volume": {
"name": "Volume",
"state_attributes": {
"event_type": {
"state": {
"short_press_release": "[%key:component::bang_olufsen::common::short_press_release%]",
"long_press_timeout": "[%key:component::bang_olufsen::common::long_press_timeout%]",
"long_press_release": "[%key:component::bang_olufsen::common::long_press_release%]",
"very_long_press_timeout": "[%key:component::bang_olufsen::common::very_long_press_timeout%]",
"very_long_press_release": "[%key:component::bang_olufsen::common::very_long_press_release%]"
}
}
}
}
}
},
"selector": {
"source_ids": {
"options": {

@@ -3,8 +3,10 @@
from __future__ import annotations

import logging
from typing import TYPE_CHECKING

from mozart_api.models import (
ButtonEvent,
ListeningModeProps,
PlaybackContentMetadata,
PlaybackError,
@@ -26,6 +28,7 @@ from homeassistant.util.enum import try_parse_enum
from .const import (
BANG_OLUFSEN_WEBSOCKET_EVENT,
CONNECTION_STATUS,
EVENT_TRANSLATION_MAP,
WebsocketNotification,
)
from .entity import BangOlufsenBase
@@ -54,6 +57,8 @@ class BangOlufsenWebsocket(BangOlufsenBase):
self._client.get_active_listening_mode_notifications(
self.on_active_listening_mode
)
self._client.get_button_notifications(self.on_button_notification)

self._client.get_playback_error_notifications(
self.on_playback_error_notification
)
@@ -104,6 +109,19 @@ class BangOlufsenWebsocket(BangOlufsenBase):
notification,
)

def on_button_notification(self, notification: ButtonEvent) -> None:
"""Send button dispatch."""
# State is expected to always be available.
if TYPE_CHECKING:
assert notification.state

# Send to event entity
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
EVENT_TRANSLATION_MAP[notification.state],
)

def on_notification_notification(
self, notification: WebsocketNotificationTag
) -> None:

@@ -14,10 +14,13 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN
from .coordinator import BluesoundCoordinator

CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

PLATFORMS = [Platform.MEDIA_PLAYER]
PLATFORMS = [
Platform.MEDIA_PLAYER,
]


@dataclass
@@ -26,6 +29,7 @@ class BluesoundRuntimeData:

player: Player
sync_status: SyncStatus
coordinator: BluesoundCoordinator

type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData]
@@ -33,9 +37,6 @@ type BluesoundConfigEntry = ConfigEntry[BluesoundRuntimeData]

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Set up the Bluesound."""
if DOMAIN not in hass.data:
hass.data[DOMAIN] = []

return True


@@ -46,13 +47,16 @@ async def async_setup_entry(
host = config_entry.data[CONF_HOST]
port = config_entry.data[CONF_PORT]
session = async_get_clientsession(hass)
async with Player(host, port, session=session, default_timeout=10) as player:
try:
sync_status = await player.sync_status(timeout=1)
except PlayerUnreachableError as ex:
raise ConfigEntryNotReady(f"Error connecting to {host}:{port}") from ex
player = Player(host, port, session=session, default_timeout=10)
try:
sync_status = await player.sync_status(timeout=1)
except PlayerUnreachableError as ex:
raise ConfigEntryNotReady(f"Error connecting to {host}:{port}") from ex

config_entry.runtime_data = BluesoundRuntimeData(player, sync_status)
coordinator = BluesoundCoordinator(hass, player, sync_status)
await coordinator.async_config_entry_first_refresh()

config_entry.runtime_data = BluesoundRuntimeData(player, sync_status, coordinator)

await hass.config_entries.async_forward_entry_setups(config_entry, PLATFORMS)
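Editor's note: the reworked setup is coordinator-first: build the coordinator, prime it, then forward platforms, so entities construct with coordinator.data already populated. The generic shape, under the assumption of a typical DataUpdateCoordinator subclass (MyCoordinator and the empty PLATFORMS are hypothetical):

PLATFORMS: list = []  # filled per integration

async def async_setup_entry(hass, entry) -> bool:
    coordinator = MyCoordinator(hass)
    # Raises ConfigEntryNotReady on failure, so setup is retried later.
    await coordinator.async_config_entry_first_refresh()
    entry.runtime_data = coordinator  # entities can read coordinator.data safely
    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True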


@@ -71,27 +71,6 @@ class BluesoundConfigFlow(ConfigFlow, domain=DOMAIN):
),
)

async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
"""Import bluesound config entry from configuration.yaml."""
session = async_get_clientsession(self.hass)
async with Player(
import_data[CONF_HOST], import_data[CONF_PORT], session=session
) as player:
try:
sync_status = await player.sync_status(timeout=1)
except PlayerUnreachableError:
return self.async_abort(reason="cannot_connect")

await self.async_set_unique_id(
format_unique_id(sync_status.mac, import_data[CONF_PORT])
)
self._abort_if_unique_id_configured()

return self.async_create_entry(
title=sync_status.name,
data=import_data,
)

async def async_step_zeroconf(
self, discovery_info: zeroconf.ZeroconfServiceInfo
) -> ConfigFlowResult:

homeassistant/components/bluesound/coordinator.py (new file, 160 lines)
@@ -0,0 +1,160 @@
"""Define a base coordinator for Bluesound entities."""

from __future__ import annotations

import asyncio
from collections.abc import Callable, Coroutine
import contextlib
from dataclasses import dataclass, replace
from datetime import timedelta
import logging

from pyblu import Input, Player, Preset, Status, SyncStatus
from pyblu.errors import PlayerUnreachableError

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

NODE_OFFLINE_CHECK_TIMEOUT = timedelta(minutes=3)
PRESET_AND_INPUTS_INTERVAL = timedelta(minutes=15)


@dataclass
class BluesoundData:
"""Define a class to hold Bluesound data."""

sync_status: SyncStatus
status: Status
presets: list[Preset]
inputs: list[Input]


def cancel_task(task: asyncio.Task) -> Callable[[], Coroutine[None, None, None]]:
"""Cancel a task."""

async def _cancel_task() -> None:
task.cancel()
with contextlib.suppress(asyncio.CancelledError):
await task

return _cancel_task
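Editor's note: cancel_task returns an async closure, which is the shape config_entry.async_on_unload expects, so each polling loop's lifetime is tied to the entry. A runnable mini-demo reusing cancel_task as defined above:

import asyncio

async def demo() -> None:
    task = asyncio.create_task(asyncio.sleep(3600))
    unload = cancel_task(task)  # what async_on_unload would store
    await unload()              # cancels and awaits the task cleanly
    assert task.cancelled()

asyncio.run(demo())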


class BluesoundCoordinator(DataUpdateCoordinator[BluesoundData]):
"""Define an object to hold Bluesound data."""

def __init__(
self, hass: HomeAssistant, player: Player, sync_status: SyncStatus
) -> None:
"""Initialize."""
self.player = player
self._inital_sync_status = sync_status

super().__init__(
hass,
logger=_LOGGER,
name=sync_status.name,
)

async def _async_setup(self) -> None:
assert self.config_entry is not None

preset = await self.player.presets()
inputs = await self.player.inputs()
status = await self.player.status()

self.async_set_updated_data(
BluesoundData(
sync_status=self._inital_sync_status,
status=status,
presets=preset,
inputs=inputs,
)
)

status_loop_task = self.hass.async_create_background_task(
self._poll_status_loop(),
name=f"bluesound.poll_status_loop_{self.data.sync_status.id}",
)
self.config_entry.async_on_unload(cancel_task(status_loop_task))

sync_status_loop_task = self.hass.async_create_background_task(
self._poll_sync_status_loop(),
name=f"bluesound.poll_sync_status_loop_{self.data.sync_status.id}",
)
self.config_entry.async_on_unload(cancel_task(sync_status_loop_task))

presets_and_inputs_loop_task = self.hass.async_create_background_task(
self._poll_presets_and_inputs_loop(),
name=f"bluesound.poll_presets_and_inputs_loop_{self.data.sync_status.id}",
)
self.config_entry.async_on_unload(cancel_task(presets_and_inputs_loop_task))

async def _async_update_data(self) -> BluesoundData:
return self.data

async def _poll_presets_and_inputs_loop(self) -> None:
while True:
await asyncio.sleep(PRESET_AND_INPUTS_INTERVAL.total_seconds())
try:
preset = await self.player.presets()
inputs = await self.player.inputs()
self.async_set_updated_data(
replace(
self.data,
presets=preset,
inputs=inputs,
)
)
except PlayerUnreachableError as ex:
self.async_set_update_error(ex)
except asyncio.CancelledError:
return
except Exception as ex:  # noqa: BLE001 - this loop should never stop
self.async_set_update_error(ex)

async def _poll_status_loop(self) -> None:
"""Loop which polls the status of the player."""
while True:
try:
status = await self.player.status(
etag=self.data.status.etag, poll_timeout=120, timeout=125
)
self.async_set_updated_data(
replace(
self.data,
status=status,
)
)
except PlayerUnreachableError as ex:
self.async_set_update_error(ex)
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())
except asyncio.CancelledError:
return
except Exception as ex:  # noqa: BLE001 - this loop should never stop
self.async_set_update_error(ex)
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())

async def _poll_sync_status_loop(self) -> None:
"""Loop which polls the sync status of the player."""
while True:
try:
sync_status = await self.player.sync_status(
etag=self.data.sync_status.etag, poll_timeout=120, timeout=125
)
self.async_set_updated_data(
replace(
self.data,
sync_status=sync_status,
)
)
except PlayerUnreachableError as ex:
self.async_set_update_error(ex)
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())
except asyncio.CancelledError:
raise
except Exception as ex:  # noqa: BLE001 - this loop should never stop
self.async_set_update_error(ex)
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT.total_seconds())
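Editor's note: both loops above are long polls: pyblu holds the HTTP request server-side until the state changes or poll_timeout (120 s) expires, and the client timeout (125 s) is deliberately longer so it never fires first. A condensed sketch of that contract (pyblu call shape taken from the diff above):

async def poll_status_forever(player, initial_status) -> None:
    status = initial_status
    while True:
        # Returns quickly on a state change, or after ~120 s with the same etag.
        status = await player.status(
            etag=status.etag, poll_timeout=120, timeout=125
        )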
@@ -2,20 +2,16 @@

from __future__ import annotations

import asyncio
from asyncio import CancelledError, Task
from contextlib import suppress
from asyncio import Task
from datetime import datetime, timedelta
import logging
from typing import TYPE_CHECKING, Any

from pyblu import Input, Player, Preset, Status, SyncStatus
from pyblu.errors import PlayerUnreachableError
import voluptuous as vol

from homeassistant.components import media_source
from homeassistant.components.media_player import (
PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA,
BrowseMedia,
MediaPlayerEntity,
MediaPlayerEntityFeature,
@@ -23,16 +19,10 @@ from homeassistant.components.media_player import (
MediaType,
async_process_play_media_url,
)
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_HOST, CONF_HOSTS, CONF_NAME, CONF_PORT
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.const import CONF_HOST, CONF_PORT
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers import (
config_validation as cv,
entity_platform,
issue_registry as ir,
)
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.device_registry import (
CONNECTION_NETWORK_MAC,
DeviceInfo,
@@ -43,10 +33,11 @@ from homeassistant.helpers.dispatcher import (
async_dispatcher_send,
)
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
import homeassistant.util.dt as dt_util

from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN, INTEGRATION_TITLE
from .const import ATTR_BLUESOUND_GROUP, ATTR_MASTER, DOMAIN
from .coordinator import BluesoundCoordinator
from .utils import dispatcher_join_signal, dispatcher_unjoin_signal, format_unique_id

if TYPE_CHECKING:
@@ -64,71 +55,8 @@ SERVICE_JOIN = "join"
SERVICE_SET_TIMER = "set_sleep_timer"
SERVICE_UNJOIN = "unjoin"

NODE_OFFLINE_CHECK_TIMEOUT = 180
NODE_RETRY_INITIATION = timedelta(minutes=3)

SYNC_STATUS_INTERVAL = timedelta(minutes=5)

POLL_TIMEOUT = 120

PLATFORM_SCHEMA = MEDIA_PLAYER_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_HOSTS): vol.All(
cv.ensure_list,
[
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
],
)
}
)


async def _async_import(hass: HomeAssistant, config: ConfigType) -> None:
"""Import config entry from configuration.yaml."""
if not hass.config_entries.async_entries(DOMAIN):
# Start import flow
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config
)
if (
result["type"] == FlowResultType.ABORT
and result["reason"] == "cannot_connect"
):
ir.async_create_issue(
hass,
DOMAIN,
f"deprecated_yaml_import_issue_{result['reason']}",
breaks_in_ha_version="2025.2.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key=f"deprecated_yaml_import_issue_{result['reason']}",
translation_placeholders={
"domain": DOMAIN,
"integration_title": INTEGRATION_TITLE,
},
)
return

ir.async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
breaks_in_ha_version="2025.2.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=ir.IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": INTEGRATION_TITLE,
},
)


async def async_setup_entry(
hass: HomeAssistant,
@@ -137,10 +65,10 @@ async def async_setup_entry(
) -> None:
"""Set up the Bluesound entry."""
bluesound_player = BluesoundPlayer(
config_entry.runtime_data.coordinator,
config_entry.data[CONF_HOST],
config_entry.data[CONF_PORT],
config_entry.runtime_data.player,
config_entry.runtime_data.sync_status,
)

platform = entity_platform.async_get_current_platform()
@@ -155,27 +83,10 @@ async def async_setup_entry(
)
platform.async_register_entity_service(SERVICE_UNJOIN, None, "async_unjoin")

hass.data[DATA_BLUESOUND].append(bluesound_player)
async_add_entities([bluesound_player], update_before_add=True)


async def async_setup_platform(
hass: HomeAssistant,
config: ConfigType,
async_add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None,
) -> None:
"""Trigger import flows."""
hosts = config.get(CONF_HOSTS, [])
for host in hosts:
import_data = {
CONF_HOST: host[CONF_HOST],
CONF_PORT: host.get(CONF_PORT, 11000),
}
hass.async_create_task(_async_import(hass, import_data))


class BluesoundPlayer(MediaPlayerEntity):
class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity):
"""Representation of a Bluesound Player."""

_attr_media_content_type = MediaType.MUSIC
@@ -184,12 +95,15 @@ class BluesoundPlayer(MediaPlayerEntity):

def __init__(
self,
coordinator: BluesoundCoordinator,
host: str,
port: int,
player: Player,
sync_status: SyncStatus,
) -> None:
"""Initialize the media player."""
super().__init__(coordinator)
sync_status = coordinator.data.sync_status

self.host = host
self.port = port
self._poll_status_loop_task: Task[None] | None = None
@@ -197,15 +111,14 @@ class BluesoundPlayer(MediaPlayerEntity):
self._id = sync_status.id
self._last_status_update: datetime | None = None
self._sync_status = sync_status
self._status: Status | None = None
self._inputs: list[Input] = []
self._presets: list[Preset] = []
self._status: Status = coordinator.data.status
self._inputs: list[Input] = coordinator.data.inputs
self._presets: list[Preset] = coordinator.data.presets
self._group_name: str | None = None
self._group_list: list[str] = []
self._bluesound_device_name = sync_status.name
self._player = player
self._is_leader = False
self._leader: BluesoundPlayer | None = None
self._last_status_update = dt_util.utcnow()

self._attr_unique_id = format_unique_id(sync_status.mac, port)
# there should always be one player with the default port per mac
@@ -228,52 +141,10 @@ class BluesoundPlayer(MediaPlayerEntity):
via_device=(DOMAIN, format_mac(sync_status.mac)),
)

async def _poll_status_loop(self) -> None:
"""Loop which polls the status of the player."""
while True:
try:
await self.async_update_status()
except PlayerUnreachableError:
_LOGGER.error(
"Node %s:%s is offline, retrying later", self.host, self.port
)
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
except CancelledError:
_LOGGER.debug(
"Stopping the polling of node %s:%s", self.host, self.port
)
return
except: # noqa: E722 - this loop should never stop
_LOGGER.exception(
"Unexpected error for %s:%s, retrying later", self.host, self.port
)
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)

async def _poll_sync_status_loop(self) -> None:
"""Loop which polls the sync status of the player."""
while True:
try:
await self.update_sync_status()
except PlayerUnreachableError:
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)
except CancelledError:
raise
except: # noqa: E722 - all errors must be caught for this loop
await asyncio.sleep(NODE_OFFLINE_CHECK_TIMEOUT)

async def async_added_to_hass(self) -> None:
"""Start the polling task."""
await super().async_added_to_hass()

self._poll_status_loop_task = self.hass.async_create_background_task(
self._poll_status_loop(),
name=f"bluesound.poll_status_loop_{self.host}:{self.port}",
)
self._poll_sync_status_loop_task = self.hass.async_create_background_task(
self._poll_sync_status_loop(),
name=f"bluesound.poll_sync_status_loop_{self.host}:{self.port}",
)

assert self._sync_status.id is not None
self.async_on_remove(
async_dispatcher_connect(
@@ -294,105 +165,24 @@ class BluesoundPlayer(MediaPlayerEntity):
"""Stop the polling task."""
await super().async_will_remove_from_hass()

assert self._poll_status_loop_task is not None
if self._poll_status_loop_task.cancel():
# the sleeps in _poll_loop will raise CancelledError
with suppress(CancelledError):
await self._poll_status_loop_task
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._sync_status = self.coordinator.data.sync_status
self._status = self.coordinator.data.status
self._inputs = self.coordinator.data.inputs
self._presets = self.coordinator.data.presets

assert self._poll_sync_status_loop_task is not None
if self._poll_sync_status_loop_task.cancel():
# the sleeps in _poll_sync_status_loop will raise CancelledError
with suppress(CancelledError):
await self._poll_sync_status_loop_task

self.hass.data[DATA_BLUESOUND].remove(self)

async def async_update(self) -> None:
"""Update internal status of the entity."""
if not self.available:
return

with suppress(PlayerUnreachableError):
await self.async_update_presets()
await self.async_update_captures()

async def async_update_status(self) -> None:
"""Use the poll session to always get the status of the player."""
etag = None
if self._status is not None:
etag = self._status.etag

try:
status = await self._player.status(
etag=etag, poll_timeout=POLL_TIMEOUT, timeout=POLL_TIMEOUT + 5
)

self._attr_available = True
self._last_status_update = dt_util.utcnow()
self._status = status

self.async_write_ha_state()
except PlayerUnreachableError:
self._attr_available = False
self._last_status_update = None
self._status = None
self.async_write_ha_state()
_LOGGER.error(
"Client connection error, marking %s as offline",
self._bluesound_device_name,
)
raise

async def update_sync_status(self) -> None:
"""Update the internal status."""
etag = None
if self._sync_status:
etag = self._sync_status.etag
sync_status = await self._player.sync_status(
etag=etag, poll_timeout=POLL_TIMEOUT, timeout=POLL_TIMEOUT + 5
)

self._sync_status = sync_status
self._last_status_update = dt_util.utcnow()

self._group_list = self.rebuild_bluesound_group()

if sync_status.leader is not None:
self._is_leader = False
leader_id = f"{sync_status.leader.ip}:{sync_status.leader.port}"
leader_device = [
device
for device in self.hass.data[DATA_BLUESOUND]
if device.id == leader_id
]

if leader_device and leader_id != self.id:
self._leader = leader_device[0]
else:
self._leader = None
_LOGGER.error("Leader not found %s", leader_id)
else:
if self._leader is not None:
self._leader = None
followers = self._sync_status.followers
self._is_leader = followers is not None

self.async_write_ha_state()

async def async_update_captures(self) -> None:
"""Update Capture sources."""
inputs = await self._player.inputs()
self._inputs = inputs

async def async_update_presets(self) -> None:
"""Update Presets."""
presets = await self._player.presets()
self._presets = presets

@property
def state(self) -> MediaPlayerState:
"""Return the state of the device."""
if self._status is None:
if self.available is False:
return MediaPlayerState.OFF

if self.is_grouped and not self.is_leader:
@@ -409,7 +199,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def media_title(self) -> str | None:
"""Title of current playing media."""
if self._status is None or (self.is_grouped and not self.is_leader):
if self.available is False or (self.is_grouped and not self.is_leader):
return None

return self._status.name
@@ -417,7 +207,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def media_artist(self) -> str | None:
"""Artist of current playing media (Music track only)."""
if self._status is None:
if self.available is False:
return None

if self.is_grouped and not self.is_leader:
@@ -428,7 +218,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def media_album_name(self) -> str | None:
"""Artist of current playing media (Music track only)."""
|
||||
if self._status is None or (self.is_grouped and not self.is_leader):
if self.available is False or (self.is_grouped and not self.is_leader):
return None

return self._status.album
@@ -436,7 +226,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def media_image_url(self) -> str | None:
"""Image url of current playing media."""
if self._status is None or (self.is_grouped and not self.is_leader):
if self.available is False or (self.is_grouped and not self.is_leader):
return None

url = self._status.image
@@ -451,7 +241,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def media_position(self) -> int | None:
"""Position of current playing media in seconds."""
if self._status is None or (self.is_grouped and not self.is_leader):
if self.available is False or (self.is_grouped and not self.is_leader):
return None

mediastate = self.state
@@ -470,7 +260,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def media_duration(self) -> int | None:
"""Duration of current playing media in seconds."""
if self._status is None or (self.is_grouped and not self.is_leader):
if self.available is False or (self.is_grouped and not self.is_leader):
return None

duration = self._status.total_seconds
@@ -487,16 +277,11 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def volume_level(self) -> float | None:
"""Volume level of the media player (0..1)."""
volume = None
volume = self._status.volume

if self._status is not None:
volume = self._status.volume
if self.is_grouped:
volume = self._sync_status.volume

if volume is None:
return None

return volume / 100

@property
@@ -529,7 +314,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def source_list(self) -> list[str] | None:
"""List of available input sources."""
if self._status is None or (self.is_grouped and not self.is_leader):
if self.available is False or (self.is_grouped and not self.is_leader):
return None

sources = [x.text for x in self._inputs]
@@ -540,7 +325,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def source(self) -> str | None:
"""Name of the current input source."""
if self._status is None or (self.is_grouped and not self.is_leader):
if self.available is False or (self.is_grouped and not self.is_leader):
return None

if self._status.input_id is not None:
@@ -557,7 +342,7 @@ class BluesoundPlayer(MediaPlayerEntity):
@property
def supported_features(self) -> MediaPlayerEntityFeature:
"""Flag of media commands that are supported."""
if self._status is None:
if self.available is False:
return MediaPlayerEntityFeature(0)

if self.is_grouped and not self.is_leader:
@@ -659,16 +444,21 @@ class BluesoundPlayer(MediaPlayerEntity):
if self.sync_status.leader is None and self.sync_status.followers is None:
return []

player_entities: list[BluesoundPlayer] = self.hass.data[DATA_BLUESOUND]
config_entries: list[BluesoundConfigEntry] = (
self.hass.config_entries.async_entries(DOMAIN)
)
sync_status_list = [
x.runtime_data.coordinator.data.sync_status for x in config_entries
]

leader_sync_status: SyncStatus | None = None
if self.sync_status.leader is None:
leader_sync_status = self.sync_status
else:
required_id = f"{self.sync_status.leader.ip}:{self.sync_status.leader.port}"
for x in player_entities:
if x.sync_status.id == required_id:
leader_sync_status = x.sync_status
for sync_status in sync_status_list:
if sync_status.id == required_id:
leader_sync_status = sync_status
break

if leader_sync_status is None or leader_sync_status.followers is None:
@@ -676,9 +466,9 @@ class BluesoundPlayer(MediaPlayerEntity):

follower_ids = [f"{x.ip}:{x.port}" for x in leader_sync_status.followers]
follower_names = [
x.sync_status.name
for x in player_entities
if x.sync_status.id in follower_ids
sync_status.name
for sync_status in sync_status_list
if sync_status.id in follower_ids
]
follower_names.insert(0, leader_sync_status.name)
return follower_names

@@ -19,7 +19,7 @@
"bluetooth-adapters==0.20.2",
"bluetooth-auto-recovery==1.4.2",
"bluetooth-data-tools==1.20.0",
"dbus-fast==2.24.3",
"habluetooth==3.6.0"
"dbus-fast==2.28.0",
"habluetooth==3.7.0"
]
}

@@ -6,5 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/bring",
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["bring_api"],
"requirements": ["bring-api==0.9.1"]
}

@@ -12,7 +12,7 @@
}
},
"discovery_confirm": {
"description": "Do you want to setup {name}?"
"description": "Do you want to set up {name}?"
},
"reconfigure": {
"description": "Reconfigure your Cambridge Audio Streamer.",
@@ -28,7 +28,7 @@
"cannot_connect": "Failed to connect to Cambridge Audio device. Please make sure the device is powered up and connected to the network. Try power-cycling the device if it does not connect."
},
"abort": {
"wrong_device": "This Cambridge Audio device does not match the existing device id. Please make sure you entered the correct IP address.",
"wrong_device": "This Cambridge Audio device does not match the existing device ID. Please make sure you entered the correct IP address.",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"

@@ -516,6 +516,19 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
"""Flag supported features."""
return self._attr_supported_features

@property
def supported_features_compat(self) -> CameraEntityFeature:
"""Return the supported features as CameraEntityFeature.

Remove this compatibility shim in 2025.1 or later.
"""
features = self.supported_features
if type(features) is int: # noqa: E721
new_features = CameraEntityFeature(features)
self._report_deprecated_supported_features_values(new_features)
return new_features
return features
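Editor's note: supported_features_compat accepts the legacy case of integrations returning a raw int and wraps it in the IntFlag enum, logging a deprecation along the way. A standalone sketch of the coercion (enum values match CameraEntityFeature's real ON_OFF/STREAM values):

from enum import IntFlag

class CameraEntityFeature(IntFlag):
    ON_OFF = 1
    STREAM = 2

def coerce(features: "CameraEntityFeature | int") -> CameraEntityFeature:
    # Exact type check on purpose: IntFlag instances are ints too, and
    # isinstance() would wrongly match them.
    if type(features) is int:
        return CameraEntityFeature(features)  # wrap the legacy raw int
    return features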

@cached_property
def is_recording(self) -> bool:
"""Return true if the device is recording."""
@@ -569,7 +582,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):

self._deprecate_attr_frontend_stream_type_logged = True
return self._attr_frontend_stream_type
if CameraEntityFeature.STREAM not in self.supported_features:
if CameraEntityFeature.STREAM not in self.supported_features_compat:
return None
if (
self._webrtc_provider
@@ -798,7 +811,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
async def async_internal_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
await super().async_internal_added_to_hass()
self.__supports_stream = self.supported_features & CameraEntityFeature.STREAM
self.__supports_stream = (
self.supported_features_compat & CameraEntityFeature.STREAM
)
await self.async_refresh_providers(write_state=False)

async def async_refresh_providers(self, *, write_state: bool = True) -> None:
@@ -838,7 +853,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
self, fn: Callable[[HomeAssistant, Camera], Coroutine[None, None, _T | None]]
) -> _T | None:
"""Get first provider that supports this camera."""
if CameraEntityFeature.STREAM not in self.supported_features:
if CameraEntityFeature.STREAM not in self.supported_features_compat:
return None

return await fn(self.hass, self)
@@ -896,7 +911,7 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
def camera_capabilities(self) -> CameraCapabilities:
"""Return the camera capabilities."""
frontend_stream_types = set()
if CameraEntityFeature.STREAM in self.supported_features:
if CameraEntityFeature.STREAM in self.supported_features_compat:
if self._supports_native_sync_webrtc or self._supports_native_async_webrtc:
# The camera has a native WebRTC implementation
frontend_stream_types.add(StreamType.WEB_RTC)
@@ -916,7 +931,8 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
"""
super().async_write_ha_state()
if self.__supports_stream != (
supports_stream := self.supported_features & CameraEntityFeature.STREAM
supports_stream := self.supported_features_compat
& CameraEntityFeature.STREAM
):
self.__supports_stream = supports_stream
self._invalidate_camera_capabilities_cache()

@@ -5,7 +5,7 @@ from __future__ import annotations
import configparser
from dataclasses import dataclass
import logging
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, ClassVar
from urllib.parse import urlparse

import aiohttp
@@ -129,7 +129,7 @@ class ChromecastInfo:
class ChromeCastZeroconf:
"""Class to hold a zeroconf instance."""

__zconf: zeroconf.HaZeroconf | None = None
__zconf: ClassVar[zeroconf.HaZeroconf | None] = None

@classmethod
def set_zeroconf(cls, zconf: zeroconf.HaZeroconf) -> None:

@@ -3,7 +3,6 @@
from __future__ import annotations

import logging
import re

from pexpect import pxssh
import voluptuous as vol
@@ -101,11 +100,11 @@ class CiscoDeviceScanner(DeviceScanner):

return False

def _get_arp_data(self):
def _get_arp_data(self) -> str | None:
"""Open connection to the router and get arp entries."""

try:
cisco_ssh = pxssh.pxssh()
cisco_ssh: pxssh.pxssh[str] = pxssh.pxssh(encoding="uft-8")
|
||||
cisco_ssh.login(
|
||||
self.host,
|
||||
self.username,
|
||||
@@ -115,12 +114,11 @@ class CiscoDeviceScanner(DeviceScanner):
|
||||
)
|
||||
|
||||
# Find the hostname
|
||||
initial_line = cisco_ssh.before.decode("utf-8").splitlines()
|
||||
initial_line = (cisco_ssh.before or "").splitlines()
|
||||
router_hostname = initial_line[len(initial_line) - 1]
|
||||
router_hostname += "#"
|
||||
# Set the discovered hostname as prompt
|
||||
regex_expression = f"(?i)^{router_hostname}".encode()
|
||||
cisco_ssh.PROMPT = re.compile(regex_expression, re.MULTILINE)
|
||||
cisco_ssh.PROMPT = f"(?i)^{router_hostname}"
|
||||
# Allow full arp table to print at once
|
||||
cisco_ssh.sendline("terminal length 0")
|
||||
cisco_ssh.prompt(1)
|
||||
@@ -128,13 +126,11 @@ class CiscoDeviceScanner(DeviceScanner):
|
||||
cisco_ssh.sendline("show ip arp")
|
||||
cisco_ssh.prompt(1)
|
||||
|
||||
devices_result = cisco_ssh.before
|
||||
|
||||
return devices_result.decode("utf-8")
|
||||
except pxssh.ExceptionPxssh as px_e:
|
||||
_LOGGER.error("Failed to login via pxssh: %s", px_e)
|
||||
return None
|
||||
|
||||
return None
|
||||
return cisco_ssh.before
|
||||
|
||||
|
||||
def _parse_cisco_mac_address(cisco_hardware_addr):
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pexpect", "ptyprocess"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["pexpect==4.6.0"]
|
||||
"requirements": ["pexpect==4.9.0"]
|
||||
}
|
||||
|
||||
@@ -36,7 +36,14 @@ from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import bind_hass
 from homeassistant.util.signal_type import SignalType

-from . import account_link, http_api
+# Pre-import backup to avoid it being imported
+# later when the import executor is busy and delaying
+# startup
+from . import (
+    account_link,
+    backup,  # noqa: F401
+    http_api,
+)
 from .client import CloudClient
 from .const import (
     CONF_ACCOUNT_LINK_SERVER,
@@ -2,12 +2,15 @@

 from __future__ import annotations

+import asyncio
 import base64
 from collections.abc import AsyncIterator, Callable, Coroutine, Mapping
 import hashlib
-from typing import Any, Self
+import logging
+import random
+from typing import Any

-from aiohttp import ClientError, ClientTimeout, StreamReader
+from aiohttp import ClientError, ClientTimeout
 from hass_nabucasa import Cloud, CloudError
 from hass_nabucasa.cloud_api import (
     async_files_delete_file,
@@ -18,12 +21,17 @@ from hass_nabucasa.cloud_api import (

 from homeassistant.components.backup import AgentBackup, BackupAgent, BackupAgentError
 from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers.aiohttp_client import ChunkAsyncStreamIterator
 from homeassistant.helpers.dispatcher import async_dispatcher_connect

 from .client import CloudClient
 from .const import DATA_CLOUD, DOMAIN, EVENT_CLOUD_EVENT

+_LOGGER = logging.getLogger(__name__)
 _STORAGE_BACKUP = "backup"
+_RETRY_LIMIT = 5
+_RETRY_SECONDS_MIN = 60
+_RETRY_SECONDS_MAX = 600


 async def _b64md5(stream: AsyncIterator[bytes]) -> str:
@@ -71,31 +79,6 @@ def async_register_backup_agents_listener(
     return unsub


-class ChunkAsyncStreamIterator:
-    """Async iterator for chunked streams.
-
-    Based on aiohttp.streams.ChunkTupleAsyncStreamIterator, but yields
-    bytes instead of tuple[bytes, bool].
-    """
-
-    __slots__ = ("_stream",)
-
-    def __init__(self, stream: StreamReader) -> None:
-        """Initialize."""
-        self._stream = stream
-
-    def __aiter__(self) -> Self:
-        """Iterate."""
-        return self
-
-    async def __anext__(self) -> bytes:
-        """Yield next chunk."""
-        rv = await self._stream.readchunk()
-        if rv == (b"", False):
-            raise StopAsyncIteration
-        return rv[0]
-
-
 class CloudBackupAgent(BackupAgent):
     """Cloud backup agent."""

@@ -136,13 +119,55 @@ class CloudBackupAgent(BackupAgent):
             raise BackupAgentError("Failed to get download details") from err

         try:
-            resp = await self._cloud.websession.get(details["url"])
+            resp = await self._cloud.websession.get(
+                details["url"],
+                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
+            )

             resp.raise_for_status()
         except ClientError as err:
             raise BackupAgentError("Failed to download backup") from err

         return ChunkAsyncStreamIterator(resp.content)

+    async def _async_do_upload_backup(
+        self,
+        *,
+        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
+        filename: str,
+        base64md5hash: str,
+        metadata: dict[str, Any],
+        size: int,
+    ) -> None:
+        """Upload a backup."""
+        try:
+            details = await async_files_upload_details(
+                self._cloud,
+                storage_type=_STORAGE_BACKUP,
+                filename=filename,
+                metadata=metadata,
+                size=size,
+                base64md5hash=base64md5hash,
+            )
+        except (ClientError, CloudError) as err:
+            raise BackupAgentError("Failed to get upload details") from err
+
+        try:
+            upload_status = await self._cloud.websession.put(
+                details["url"],
+                data=await open_stream(),
+                headers=details["headers"] | {"content-length": str(size)},
+                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
+            )
+            _LOGGER.log(
+                logging.DEBUG if upload_status.status < 400 else logging.WARNING,
+                "Backup upload status: %s",
+                upload_status.status,
+            )
+            upload_status.raise_for_status()
+        except (TimeoutError, ClientError) as err:
+            raise BackupAgentError("Failed to upload backup") from err
+
     async def async_upload_backup(
         self,
         *,
@@ -159,29 +184,34 @@ class CloudBackupAgent(BackupAgent):
             raise BackupAgentError("Cloud backups must be protected")

         base64md5hash = await _b64md5(await open_stream())
+        filename = self._get_backup_filename()
+        metadata = backup.as_dict()
+        size = backup.size

-        try:
-            details = await async_files_upload_details(
-                self._cloud,
-                storage_type=_STORAGE_BACKUP,
-                filename=self._get_backup_filename(),
-                metadata=backup.as_dict(),
-                size=backup.size,
-                base64md5hash=base64md5hash,
-            )
-        except (ClientError, CloudError) as err:
-            raise BackupAgentError("Failed to get upload details") from err
-
-        try:
-            upload_status = await self._cloud.websession.put(
-                details["url"],
-                data=await open_stream(),
-                headers=details["headers"] | {"content-length": str(backup.size)},
-                timeout=ClientTimeout(connect=10.0, total=43200.0),  # 43200s == 12h
-            )
-            upload_status.raise_for_status()
-        except (TimeoutError, ClientError) as err:
-            raise BackupAgentError("Failed to upload backup") from err
+        tries = 1
+        while tries <= _RETRY_LIMIT:
+            try:
+                await self._async_do_upload_backup(
+                    open_stream=open_stream,
+                    filename=filename,
+                    base64md5hash=base64md5hash,
+                    metadata=metadata,
+                    size=size,
+                )
+                break
+            except BackupAgentError as err:
+                if tries == _RETRY_LIMIT:
+                    raise
+                tries += 1
+                retry_timer = random.randint(_RETRY_SECONDS_MIN, _RETRY_SECONDS_MAX)
+                _LOGGER.info(
+                    "Failed to upload backup, retrying (%s/%s) in %ss: %s",
+                    tries,
+                    _RETRY_LIMIT,
+                    retry_timer,
+                    err,
+                )
+                await asyncio.sleep(retry_timer)

     async def async_delete_backup(
         self,
@@ -208,6 +238,7 @@ class CloudBackupAgent(BackupAgent):
         """List backups."""
         try:
             backups = await async_files_list(self._cloud, storage_type=_STORAGE_BACKUP)
+            _LOGGER.debug("Cloud backups: %s", backups)
         except (ClientError, CloudError) as err:
             raise BackupAgentError("Failed to list backups") from err
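The upload path above retries a failed backup upload up to a fixed limit, sleeping a random number of seconds between attempts. The same bounded-retry pattern in isolation (hypothetical UploadError and do_upload, not the Nabu Casa client):

import asyncio
import random
from collections.abc import Awaitable, Callable

_RETRY_LIMIT = 5
_RETRY_SECONDS_MIN = 60
_RETRY_SECONDS_MAX = 600


class UploadError(Exception):
    """Stand-in for BackupAgentError."""


async def upload_with_retries(do_upload: Callable[[], Awaitable[None]]) -> None:
    """Run do_upload, retrying up to _RETRY_LIMIT times with random backoff."""
    tries = 1
    while tries <= _RETRY_LIMIT:
        try:
            await do_upload()
            break
        except UploadError:
            if tries == _RETRY_LIMIT:
                raise  # final attempt failed: propagate to the caller
            tries += 1
            # A randomized delay spreads retries from many installations over time.
            await asyncio.sleep(random.randint(_RETRY_SECONDS_MIN, _RETRY_SECONDS_MAX))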
@@ -7,5 +7,5 @@
   "integration_type": "hub",
   "iot_class": "local_polling",
   "loggers": ["aiocomelit"],
-  "requirements": ["aiocomelit==0.9.1"]
+  "requirements": ["aiocomelit==0.10.1"]
 }

@@ -5,5 +5,5 @@
   "documentation": "https://www.home-assistant.io/integrations/compensation",
   "iot_class": "calculated",
   "quality_scale": "legacy",
-  "requirements": ["numpy==2.2.0"]
+  "requirements": ["numpy==2.2.1"]
 }

@@ -75,6 +75,7 @@ async def async_converse(
     language: str | None = None,
     agent_id: str | None = None,
     device_id: str | None = None,
+    extra_system_prompt: str | None = None,
 ) -> ConversationResult:
     """Process text and get intent."""
     agent = async_get_agent(hass, agent_id)
@@ -99,6 +100,7 @@
         device_id=device_id,
         language=language,
         agent_id=agent_id,
+        extra_system_prompt=extra_system_prompt,
     )
     with async_conversation_trace() as trace:
         trace.add_event(

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.0.5", "home-assistant-intents==2024.12.20"]
+  "requirements": ["hassil==2.1.0", "home-assistant-intents==2025.1.1"]
 }
@@ -40,6 +40,9 @@ class ConversationInput:
     agent_id: str | None = None
     """Agent to use for processing."""

+    extra_system_prompt: str | None = None
+    """Extra prompt to provide extra info to LLMs how to understand the command."""
+
     def as_dict(self) -> dict[str, Any]:
         """Return input as a dict."""
         return {
@@ -49,6 +52,7 @@ class ConversationInput:
             "device_id": self.device_id,
             "language": self.language,
             "agent_id": self.agent_id,
+            "extra_system_prompt": self.extra_system_prompt,
         }
@@ -2,39 +2,29 @@

 from __future__ import annotations

-from cookidoo_api import Cookidoo, CookidooConfig, CookidooLocalizationConfig
+import logging

-from homeassistant.const import (
-    CONF_COUNTRY,
-    CONF_EMAIL,
-    CONF_LANGUAGE,
-    CONF_PASSWORD,
-    Platform,
-)
+from cookidoo_api import CookidooAuthException, CookidooRequestException
+
+from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers import device_registry as dr, entity_registry as er

+from .const import DOMAIN
 from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator
+from .helpers import cookidoo_from_config_entry

-PLATFORMS: list[Platform] = [Platform.TODO]
+PLATFORMS: list[Platform] = [Platform.BUTTON, Platform.TODO]
+
+_LOGGER = logging.getLogger(__name__)


 async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:
     """Set up Cookidoo from a config entry."""
-
-    cookidoo = Cookidoo(
-        async_get_clientsession(hass),
-        CookidooConfig(
-            email=entry.data[CONF_EMAIL],
-            password=entry.data[CONF_PASSWORD],
-            localization=CookidooLocalizationConfig(
-                country_code=entry.data[CONF_COUNTRY].lower(),
-                language=entry.data[CONF_LANGUAGE],
-            ),
-        ),
-    )
-    coordinator = CookidooDataUpdateCoordinator(hass, cookidoo, entry)
+    coordinator = CookidooDataUpdateCoordinator(
+        hass, await cookidoo_from_config_entry(hass, entry), entry
+    )
     await coordinator.async_config_entry_first_refresh()

     entry.runtime_data = coordinator
@@ -47,3 +37,56 @@ async def async_setup_entry(hass: HomeAssistant, entry: CookidooConfigEntry) ->
 async def async_unload_entry(hass: HomeAssistant, entry: CookidooConfigEntry) -> bool:
     """Unload a config entry."""
     return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
+
+
+async def async_migrate_entry(
+    hass: HomeAssistant, config_entry: CookidooConfigEntry
+) -> bool:
+    """Migrate config entry."""
+    _LOGGER.debug("Migrating from version %s", config_entry.version)
+
+    if config_entry.version == 1 and config_entry.minor_version == 1:
+        # Add the unique uuid
+        cookidoo = await cookidoo_from_config_entry(hass, config_entry)
+
+        try:
+            auth_data = await cookidoo.login()
+        except (CookidooRequestException, CookidooAuthException) as e:
+            _LOGGER.error(
+                "Could not migrate config config_entry: %s",
+                str(e),
+            )
+            return False
+
+        unique_id = auth_data.sub
+
+        device_registry = dr.async_get(hass)
+        entity_registry = er.async_get(hass)
+        device_entries = dr.async_entries_for_config_entry(
+            device_registry, config_entry_id=config_entry.entry_id
+        )
+        entity_entries = er.async_entries_for_config_entry(
+            entity_registry, config_entry_id=config_entry.entry_id
+        )
+        for dev in device_entries:
+            device_registry.async_update_device(
+                dev.id, new_identifiers={(DOMAIN, unique_id)}
+            )
+        for ent in entity_entries:
+            assert ent.config_entry_id
+            entity_registry.async_update_entity(
+                ent.entity_id,
+                new_unique_id=ent.unique_id.replace(ent.config_entry_id, unique_id),
+            )
+
+        hass.config_entries.async_update_entry(
+            config_entry, unique_id=auth_data.sub, minor_version=2
+        )
+
+        _LOGGER.debug(
+            "Migration to version %s.%s successful",
+            config_entry.version,
+            config_entry.minor_version,
+        )
+
+    return True

homeassistant/components/cookidoo/button.py (new file, 71 lines)
@@ -0,0 +1,71 @@
+"""Support for Cookidoo buttons."""
+
+from collections.abc import Awaitable, Callable
+from dataclasses import dataclass
+
+from cookidoo_api import Cookidoo, CookidooException
+
+from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from .const import DOMAIN
+from .coordinator import CookidooConfigEntry, CookidooDataUpdateCoordinator
+from .entity import CookidooBaseEntity
+
+PARALLEL_UPDATES = 0
+
+
+@dataclass(frozen=True, kw_only=True)
+class CookidooButtonEntityDescription(ButtonEntityDescription):
+    """Describes cookidoo button entity."""
+
+    press_fn: Callable[[Cookidoo], Awaitable[None]]
+
+
+TODO_CLEAR = CookidooButtonEntityDescription(
+    key="todo_clear",
+    translation_key="todo_clear",
+    press_fn=lambda client: client.clear_shopping_list(),
+    entity_registry_enabled_default=False,
+)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: CookidooConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up Cookidoo button entities based on a config entry."""
+    coordinator = entry.runtime_data
+
+    async_add_entities([CookidooButton(coordinator, TODO_CLEAR)])
+
+
+class CookidooButton(CookidooBaseEntity, ButtonEntity):
+    """Defines an Cookidoo button."""
+
+    entity_description: CookidooButtonEntityDescription
+
+    def __init__(
+        self,
+        coordinator: CookidooDataUpdateCoordinator,
+        description: CookidooButtonEntityDescription,
+    ) -> None:
+        """Initialize cookidoo button."""
+        super().__init__(coordinator)
+        self.entity_description = description
+        assert coordinator.config_entry.unique_id
+        self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{description.key}"
+
+    async def async_press(self) -> None:
+        """Press the button."""
+        try:
+            await self.entity_description.press_fn(self.coordinator.cookidoo)
+        except CookidooException as e:
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="button_clear_todo_failed",
+            ) from e
+        await self.coordinator.async_refresh()
@@ -7,10 +7,7 @@ import logging
 from typing import Any

 from cookidoo_api import (
-    Cookidoo,
     CookidooAuthException,
-    CookidooConfig,
-    CookidooLocalizationConfig,
     CookidooRequestException,
     get_country_options,
     get_localization_options,
@@ -24,7 +21,6 @@ from homeassistant.config_entries import (
     ConfigFlowResult,
 )
 from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD
-from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.selector import (
     CountrySelector,
     CountrySelectorConfig,
@@ -36,6 +32,7 @@ from homeassistant.helpers.selector import (
 )

 from .const import DOMAIN
+from .helpers import cookidoo_from_config_data

 _LOGGER = logging.getLogger(__name__)

@@ -58,10 +55,14 @@ AUTH_DATA_SCHEMA = {
 class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Cookidoo."""

     VERSION = 1
+    MINOR_VERSION = 2

     COUNTRY_DATA_SCHEMA: dict
     LANGUAGE_DATA_SCHEMA: dict

     user_input: dict[str, Any]
+    user_uuid: str

     async def async_step_reconfigure(
         self, user_input: dict[str, Any]
@@ -79,8 +80,11 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
         if user_input is not None and not (
             errors := await self.validate_input(user_input)
         ):
+            await self.async_set_unique_id(self.user_uuid)
             if self.source == SOURCE_USER:
                 self._async_abort_entries_match({CONF_EMAIL: user_input[CONF_EMAIL]})
+                self._abort_if_unique_id_configured()
+            if self.source == SOURCE_RECONFIGURE:
+                self._abort_if_unique_id_mismatch()
             self.user_input = user_input
             return await self.async_step_language()
         await self.generate_country_schema()
@@ -154,10 +158,8 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
         if not (
             errors := await self.validate_input({**reauth_entry.data, **user_input})
         ):
-            if user_input[CONF_EMAIL] != reauth_entry.data[CONF_EMAIL]:
-                self._async_abort_entries_match(
-                    {CONF_EMAIL: user_input[CONF_EMAIL]}
-                )
+            await self.async_set_unique_id(self.user_uuid)
+            self._abort_if_unique_id_mismatch()
             return self.async_update_reload_and_abort(
                 reauth_entry, data_updates=user_input
             )
@@ -219,22 +221,12 @@ class CookidooConfigFlow(ConfigFlow, domain=DOMAIN):
         else:
             data_input[CONF_LANGUAGE] = (
                 await get_localization_options(country=data_input[CONF_COUNTRY].lower())
-            )[0]  # Pick any language to test login
+            )[0].language  # Pick any language to test login

-        session = async_get_clientsession(self.hass)
-        cookidoo = Cookidoo(
-            session,
-            CookidooConfig(
-                email=data_input[CONF_EMAIL],
-                password=data_input[CONF_PASSWORD],
-                localization=CookidooLocalizationConfig(
-                    country_code=data_input[CONF_COUNTRY].lower(),
-                    language=data_input[CONF_LANGUAGE],
-                ),
-            ),
-        )
+        cookidoo = await cookidoo_from_config_data(self.hass, data_input)
         try:
-            await cookidoo.login()
+            auth_data = await cookidoo.login()
+            self.user_uuid = auth_data.sub
             if language_input:
                 await cookidoo.get_additional_items()
         except CookidooRequestException:
@@ -21,10 +21,12 @@ class CookidooBaseEntity(CoordinatorEntity[CookidooDataUpdateCoordinator]):
         """Initialize the entity."""
         super().__init__(coordinator)

+        assert coordinator.config_entry.unique_id
+
         self.device_info = DeviceInfo(
             entry_type=DeviceEntryType.SERVICE,
             name="Cookidoo",
-            identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
+            identifiers={(DOMAIN, coordinator.config_entry.unique_id)},
             manufacturer="Vorwerk International & Co. KmG",
             model="Cookidoo - Thermomix® recipe portal",
         )

homeassistant/components/cookidoo/helpers.py (new file, 37 lines)
@@ -0,0 +1,37 @@
+"""Helpers for cookidoo."""
+
+from typing import Any
+
+from cookidoo_api import Cookidoo, CookidooConfig, get_localization_options
+
+from homeassistant.const import CONF_COUNTRY, CONF_EMAIL, CONF_LANGUAGE, CONF_PASSWORD
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.aiohttp_client import async_get_clientsession
+
+from .coordinator import CookidooConfigEntry
+
+
+async def cookidoo_from_config_data(
+    hass: HomeAssistant, data: dict[str, Any]
+) -> Cookidoo:
+    """Build cookidoo from config data."""
+    localizations = await get_localization_options(
+        country=data[CONF_COUNTRY].lower(),
+        language=data[CONF_LANGUAGE],
+    )
+
+    return Cookidoo(
+        async_get_clientsession(hass),
+        CookidooConfig(
+            email=data[CONF_EMAIL],
+            password=data[CONF_PASSWORD],
+            localization=localizations[0],
+        ),
+    )
+
+
+async def cookidoo_from_config_entry(
+    hass: HomeAssistant, entry: CookidooConfigEntry
+) -> Cookidoo:
+    """Build cookidoo from config entry."""
+    return await cookidoo_from_config_data(hass, dict(entry.data))
@@ -1,5 +1,10 @@
 {
   "entity": {
+    "button": {
+      "todo_clear": {
+        "default": "mdi:cart-off"
+      }
+    },
     "todo": {
       "ingredient_list": {
         "default": "mdi:cart-plus"

@@ -6,6 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/cookidoo",
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["cookidoo_api"],
-  "requirements": ["cookidoo-api==0.10.0"]
+  "quality_scale": "silver",
+  "requirements": ["cookidoo-api==0.12.2"]
 }

@@ -44,10 +44,16 @@
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
       "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
-      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
+      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]",
+      "unique_id_mismatch": "The user identifier does not match the previous identifier"
     }
   },
   "entity": {
+    "button": {
+      "todo_clear": {
+        "name": "Clear shopping list and additional purchases"
+      }
+    },
     "todo": {
       "ingredient_list": {
         "name": "Shopping list"
@@ -58,6 +64,9 @@
       }
     },
   "exceptions": {
+    "button_clear_todo_failed": {
+      "message": "Failed to clear all items from the Cookidoo shopping list"
+    },
     "todo_save_item_failed": {
       "message": "Failed to save {name} to Cookidoo shopping list"
     },
@@ -52,7 +52,8 @@ class CookidooIngredientsTodoListEntity(CookidooBaseEntity, TodoListEntity):
     def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None:
         """Initialize the entity."""
         super().__init__(coordinator)
-        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_ingredients"
+        assert coordinator.config_entry.unique_id
+        self._attr_unique_id = f"{coordinator.config_entry.unique_id}_ingredients"

     @property
     def todo_items(self) -> list[TodoItem]:
@@ -112,7 +113,8 @@ class CookidooAdditionalItemTodoListEntity(CookidooBaseEntity, TodoListEntity):
     def __init__(self, coordinator: CookidooDataUpdateCoordinator) -> None:
         """Initialize the entity."""
         super().__init__(coordinator)
-        self._attr_unique_id = f"{coordinator.config_entry.entry_id}_additional_items"
+        assert coordinator.config_entry.unique_id
+        self._attr_unique_id = f"{coordinator.config_entry.unique_id}_additional_items"

     @property
     def todo_items(self) -> list[TodoItem]:
@@ -300,6 +300,10 @@ class CoverEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     def supported_features(self) -> CoverEntityFeature:
         """Flag supported features."""
         if (features := self._attr_supported_features) is not None:
+            if type(features) is int:  # noqa: E721
+                new_features = CoverEntityFeature(features)
+                self._report_deprecated_supported_features_values(new_features)
+                return new_features
             return features

         supported_features = (
@@ -2,6 +2,7 @@

 from __future__ import annotations

+from collections.abc import Sequence
 import os

 from serial.tools.list_ports_common import ListPortInfo
@@ -12,7 +13,7 @@ from .const import DONT_USE_USB, MANUAL_PATH, REFRESH_LIST


 def list_ports_as_str(
-    serial_ports: list[ListPortInfo], no_usb_option: bool = True
+    serial_ports: Sequence[ListPortInfo], no_usb_option: bool = True
 ) -> list[str]:
     """Represent currently available serial ports as string.
@@ -266,7 +266,7 @@ class DeconzBaseLight[_LightDeviceT: Group | Light](
     @property
     def color_temp_kelvin(self) -> int | None:
         """Return the CT color value."""
-        if self._device.color_temp is None:
+        if self._device.color_temp is None or self._device.color_temp == 0:
             return None
         return color_temperature_mired_to_kelvin(self._device.color_temp)

homeassistant/components/decorquip/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
+"""Virtual integration: Decorquip."""

homeassistant/components/decorquip/manifest.json (new file, 6 lines)
@@ -0,0 +1,6 @@
+{
+  "domain": "decorquip",
+  "name": "Decorquip Dream",
+  "integration_type": "virtual",
+  "supported_by": "motion_blinds"
+}
@@ -50,7 +50,7 @@
   "services": {
     "get_command": {
       "name": "Get command",
-      "description": "Send sa generic HTTP get command.",
+      "description": "Sends a generic HTTP get command.",
       "fields": {
         "command": {
           "name": "Command",

@@ -19,7 +19,7 @@ rules:
     The integration does not provide any additional actions.
   docs-high-level-description: done
   docs-installation-instructions: done
-  docs-removal-instructions: todo
+  docs-removal-instructions: done
   entity-event-setup:
     status: exempt
     comment: |
@@ -41,7 +41,7 @@ rules:
     status: exempt
     comment: |
       The integration does not provide any additional options.
-  docs-installation-parameters: todo
+  docs-installation-parameters: done
   entity-unavailable: done
   integration-owner: done
   log-when-unavailable: done
@@ -6,5 +6,5 @@
   "iot_class": "local_polling",
   "loggers": ["pydoods"],
   "quality_scale": "legacy",
-  "requirements": ["pydoods==1.0.2", "Pillow==11.0.0"]
+  "requirements": ["pydoods==1.0.2", "Pillow==11.1.0"]
 }
@@ -20,6 +20,7 @@ from dsmr_parser.objects import DSMRObject, MbusDevice, Telegram
 import serial

 from homeassistant.components.sensor import (
+    DOMAIN as SENSOR_DOMAIN,
     SensorDeviceClass,
     SensorEntity,
     SensorEntityDescription,
@@ -456,24 +457,29 @@ def rename_old_gas_to_mbus(
         if entity.unique_id.endswith(
             "belgium_5min_gas_meter_reading"
         ) or entity.unique_id.endswith("hourly_gas_meter_reading"):
-            try:
-                ent_reg.async_update_entity(
-                    entity.entity_id,
-                    new_unique_id=mbus_device_id,
-                    device_id=mbus_device_id,
-                )
-            except ValueError:
+            if ent_reg.async_get_entity_id(
+                SENSOR_DOMAIN, DOMAIN, mbus_device_id
+            ):
                 LOGGER.debug(
                     "Skip migration of %s because it already exists",
                     entity.entity_id,
                 )
-            else:
-                LOGGER.debug(
-                    "Migrated entity %s from unique id %s to %s",
-                    entity.entity_id,
-                    entity.unique_id,
-                    mbus_device_id,
-                )
+                continue
+            new_device = dev_reg.async_get_or_create(
+                config_entry_id=entry.entry_id,
+                identifiers={(DOMAIN, mbus_device_id)},
+            )
+            ent_reg.async_update_entity(
+                entity.entity_id,
+                new_unique_id=mbus_device_id,
+                device_id=new_device.id,
+            )
+            LOGGER.debug(
+                "Migrated entity %s from unique id %s to %s",
+                entity.entity_id,
+                entity.unique_id,
+                mbus_device_id,
+            )
     # Cleanup old device
     dev_entities = er.async_entries_for_device(
         ent_reg, device_id, include_disabled_entities=True
@@ -57,11 +57,11 @@
   "services": {
     "get_gas_prices": {
       "name": "Get gas prices",
-      "description": "Request gas prices from easyEnergy.",
+      "description": "Requests gas prices from easyEnergy.",
       "fields": {
         "config_entry": {
           "name": "Config Entry",
-          "description": "The config entry to use for this service."
+          "description": "The configuration entry to use for this action."
         },
         "incl_vat": {
           "name": "VAT Included",
@@ -79,7 +79,7 @@
     },
     "get_energy_usage_prices": {
       "name": "Get energy usage prices",
-      "description": "Request usage energy prices from easyEnergy.",
+      "description": "Requests usage energy prices from easyEnergy.",
       "fields": {
         "config_entry": {
           "name": "[%key:component::easyenergy::services::get_gas_prices::fields::config_entry::name%]",
@@ -101,7 +101,7 @@
     },
     "get_energy_return_prices": {
       "name": "Get energy return prices",
-      "description": "Request return energy prices from easyEnergy.",
+      "description": "Requests return energy prices from easyEnergy.",
       "fields": {
         "config_entry": {
           "name": "[%key:component::easyenergy::services::get_gas_prices::fields::config_entry::name%]",

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/ecovacs",
   "iot_class": "cloud_push",
   "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
-  "requirements": ["py-sucks==0.9.10", "deebot-client==10.0.1"]
+  "requirements": ["py-sucks==0.9.10", "deebot-client==10.1.0"]
 }
@@ -163,11 +163,6 @@ class EcovacsLegacyVacuum(EcovacsLegacyEntity, StateVacuumEntity):
         data: dict[str, Any] = {}
         data[ATTR_ERROR] = self.error

-        # these attributes are deprecated and can be removed in 2025.2
-        for key, val in self.device.components.items():
-            attr_name = ATTR_COMPONENT_PREFIX + key
-            data[attr_name] = int(val * 100)
-
         return data

     def return_to_base(self, **kwargs: Any) -> None:
@@ -6,11 +6,16 @@ from dataclasses import dataclass

 from elevenlabs import AsyncElevenLabs, Model
 from elevenlabs.core import ApiError
+from httpx import ConnectError

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
+from homeassistant.exceptions import (
+    ConfigEntryAuthFailed,
+    ConfigEntryError,
+    ConfigEntryNotReady,
+)
 from homeassistant.helpers.httpx_client import get_async_client

 from .const import CONF_MODEL
@@ -48,6 +53,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElevenLabsConfigEntry) -
     model_id = entry.options[CONF_MODEL]
     try:
         model = await get_model_by_id(client, model_id)
+    except ConnectError as err:
+        raise ConfigEntryNotReady("Failed to connect") from err
     except ApiError as err:
         raise ConfigEntryAuthFailed("Auth failed") from err
@@ -13,7 +13,7 @@ rules:
   docs-actions: done
   docs-high-level-description: done
   docs-installation-instructions: done
-  docs-removal-instructions: todo
+  docs-removal-instructions: done
   entity-event-setup:
     status: exempt
     comment: >
@@ -49,7 +49,7 @@ class ElkBinarySensor(ElkAttachedEntity, BinarySensorEntity):
     _element: Zone
     _attr_entity_registry_enabled_default = False

-    def _element_changed(self, _: Element, changeset: Any) -> None:
+    def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
         # Zone in NORMAL state is OFF; any other state is ON
         self._attr_is_on = bool(
             self._element.logical_status != ZoneLogicalStatus.NORMAL
@@ -120,7 +120,7 @@ class ElkCounter(ElkSensor):
     _attr_icon = "mdi:numeric"
     _element: Counter

-    def _element_changed(self, _: Element, changeset: Any) -> None:
+    def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
         self._attr_native_value = self._element.value


@@ -153,7 +153,7 @@ class ElkKeypad(ElkSensor):
         attrs["last_keypress"] = self._element.last_keypress
         return attrs

-    def _element_changed(self, _: Element, changeset: Any) -> None:
+    def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
         self._attr_native_value = temperature_to_state(
             self._element.temperature, UNDEFINED_TEMPERATURE
         )
@@ -173,7 +173,7 @@ class ElkPanel(ElkSensor):
         attrs["system_trouble_status"] = self._element.system_trouble_status
         return attrs

-    def _element_changed(self, _: Element, changeset: Any) -> None:
+    def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
         if self._elk.is_connected():
             self._attr_native_value = (
                 "Paused" if self._element.remote_programming_status else "Connected"
@@ -188,7 +188,7 @@ class ElkSetting(ElkSensor):
     _attr_translation_key = "setting"
     _element: Setting

-    def _element_changed(self, _: Element, changeset: Any) -> None:
+    def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
         self._attr_native_value = self._element.value

     @property
@@ -257,7 +257,7 @@ class ElkZone(ElkSensor):
             return UnitOfElectricPotential.VOLT
         return None

-    def _element_changed(self, _: Element, changeset: Any) -> None:
+    def _element_changed(self, element: Element, changeset: dict[str, Any]) -> None:
         if self._element.definition == ZoneType.TEMPERATURE:
             self._attr_native_value = temperature_to_state(
                 self._element.temperature, UNDEFINED_TEMPERATURE
@@ -151,7 +151,9 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN):
                 port=self._panel_direct_port,
             )
         )
-        ssl_context = build_direct_ssl_context(cadata=self._panel_direct_ssl_cert)
+        ssl_context = await self.hass.async_add_executor_job(
+            build_direct_ssl_context, self._panel_direct_ssl_cert
+        )

         # Attempt the connection to make sure the pin works. Also, take the chance to retrieve the panel ID via APIs.
         client_api_url = get_direct_api_url(
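Building an SSL context is blocking work (certificate loading and CPU-bound parsing), so the flow above now moves it off the event loop via async_add_executor_job. The general offloading pattern, as a sketch (hypothetical build_context, not the elmax helper itself):

import ssl


def build_context(cadata: str) -> ssl.SSLContext:
    """Blocking: create and configure an SSL context from in-memory CA data."""
    return ssl.create_default_context(cadata=cadata)


# Inside a coroutine with access to hass, the blocking call runs in a thread:
#     ssl_context = await hass.async_add_executor_job(build_context, cert_data)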
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/elmax",
   "iot_class": "cloud_polling",
   "loggers": ["elmax_api"],
-  "requirements": ["elmax-api==0.0.6.3"],
+  "requirements": ["elmax-api==0.0.6.4rc0"],
   "zeroconf": [
     {
       "type": "_elmax-ssl._tcp.local."

@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["openwebif"],
-  "requirements": ["openwebifpy==4.3.0"]
+  "requirements": ["openwebifpy==4.3.1"]
 }
@@ -8,11 +8,7 @@ rules:
     comment: fixed 1 minute cycle based on Enphase Envoy device characteristics
   brands: done
   common-modules: done
-  config-flow-test-coverage:
-    status: todo
-    comment: |
-      - test_zero_conf_malformed_serial_property - with pytest.raises(KeyError) as ex::
-        I don't believe this should be able to raise a KeyError. Shouldn't we abort the flow?
+  config-flow-test-coverage: done
   config-flow:
     status: todo
     comment: |
@@ -60,11 +56,7 @@ rules:
     status: done
     comment: pending https://github.com/home-assistant/core/pull/132373
   reauthentication-flow: done
-  test-coverage:
-    status: todo
-    comment: |
-      - test_config_different_unique_id -> unique_id set to the mock config entry is an int, not a str
-      - Apart from the coverage, test_option_change_reload does not verify that the config entry is reloaded
+  test-coverage: done

   # Gold
   devices: done
@@ -22,5 +22,5 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["eq3btsmart"],
-  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==1.1.0"]
+  "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.0.0"]
 }
@@ -7,7 +7,6 @@ from typing import TYPE_CHECKING

 from aioesphomeapi import APIClient, DeviceInfo
 from bleak_esphome import connect_scanner
-from bleak_esphome.backend.cache import ESPHomeBluetoothCache

 from homeassistant.components.bluetooth import async_register_scanner
 from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback as hass_callback
@@ -28,10 +27,9 @@ def async_connect_scanner(
     entry_data: RuntimeEntryData,
     cli: APIClient,
     device_info: DeviceInfo,
-    cache: ESPHomeBluetoothCache,
 ) -> CALLBACK_TYPE:
     """Connect scanner."""
-    client_data = connect_scanner(cli, device_info, cache, entry_data.available)
+    client_data = connect_scanner(cli, device_info, entry_data.available)
     entry_data.bluetooth_device = client_data.bluetooth_device
     client_data.disconnect_callbacks = entry_data.disconnect_callbacks
     scanner = client_data.scanner

@@ -6,8 +6,6 @@ from dataclasses import dataclass, field
 from functools import cache
 from typing import Self

-from bleak_esphome.backend.cache import ESPHomeBluetoothCache
-
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.json import JSONEncoder
@@ -22,9 +20,6 @@ class DomainData:
     """Define a class that stores global esphome data in hass.data[DOMAIN]."""

     _stores: dict[str, ESPHomeStorage] = field(default_factory=dict)
-    bluetooth_cache: ESPHomeBluetoothCache = field(
-        default_factory=ESPHomeBluetoothCache
-    )

     def get_entry_data(self, entry: ESPHomeConfigEntry) -> RuntimeEntryData:
         """Return the runtime entry data associated with this config entry.
@@ -423,9 +423,7 @@ class ESPHomeManager:

         if device_info.bluetooth_proxy_feature_flags_compat(api_version):
             entry_data.disconnect_callbacks.add(
-                async_connect_scanner(
-                    hass, entry_data, cli, device_info, self.domain_data.bluetooth_cache
-                )
+                async_connect_scanner(hass, entry_data, cli, device_info)
             )

         if device_info.voice_assistant_feature_flags_compat(api_version) and (

@@ -18,7 +18,7 @@
   "requirements": [
     "aioesphomeapi==28.0.0",
     "esphome-dashboard-api==1.2.3",
-    "bleak-esphome==1.1.0"
+    "bleak-esphome==2.0.0"
   ],
   "zeroconf": ["_esphomelib._tcp.local."]
 }
@@ -14,6 +14,7 @@ import feedparser

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant, callback
+from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers.storage import Store
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 from homeassistant.util import dt as dt_util
@@ -101,7 +102,11 @@ class FeedReaderCoordinator(

     async def async_setup(self) -> None:
         """Set up the feed manager."""
-        feed = await self._async_fetch_feed()
+        try:
+            feed = await self._async_fetch_feed()
+        except UpdateFailed as err:
+            raise ConfigEntryNotReady from err
+
         self.logger.debug("Feed data fetched from %s : %s", self.url, feed["feed"])
         if feed_author := feed["feed"].get("author"):
             self.feed_author = html.unescape(feed_author)
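Re-raising UpdateFailed as ConfigEntryNotReady during setup tells Home Assistant to retry the config entry later rather than marking setup as failed outright. The same conversion in isolation (simplified fetch callable, not the full FeedReaderCoordinator):

from collections.abc import Awaitable, Callable

from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import UpdateFailed


async def first_fetch(fetch: Callable[[], Awaitable[dict]]) -> dict:
    """Run the initial fetch, deferring entry setup when it fails."""
    try:
        return await fetch()
    except UpdateFailed as err:
        # ConfigEntryNotReady schedules an automatic retry of the entry.
        raise ConfigEntryNotReady from err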
@@ -23,10 +23,10 @@ from homeassistant.helpers.dispatcher import (
     async_dispatcher_send,
 )
 from homeassistant.helpers.entity import Entity
-from homeassistant.helpers.system_info import is_official_image
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import bind_hass
 from homeassistant.util.signal_type import SignalType
+from homeassistant.util.system_info import is_official_image

 DOMAIN = "ffmpeg"
@@ -2,10 +2,11 @@

 from datetime import datetime as dt
 import logging
+from typing import Any

 import jwt
 from pyflick import FlickAPI
-from pyflick.authentication import AbstractFlickAuth
+from pyflick.authentication import SimpleFlickAuth
 from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET

 from homeassistant.config_entries import ConfigEntry
@@ -20,7 +21,8 @@ from homeassistant.const import (
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import aiohttp_client

-from .const import CONF_TOKEN_EXPIRY, DOMAIN
+from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, CONF_TOKEN_EXPIRY
+from .coordinator import FlickConfigEntry, FlickElectricDataCoordinator

 _LOGGER = logging.getLogger(__name__)

@@ -29,36 +31,85 @@ CONF_ID_TOKEN = "id_token"
 PLATFORMS = [Platform.SENSOR]


-async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_setup_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
     """Set up Flick Electric from a config entry."""
     auth = HassFlickAuth(hass, entry)

-    hass.data.setdefault(DOMAIN, {})
-    hass.data[DOMAIN][entry.entry_id] = FlickAPI(auth)
+    coordinator = FlickElectricDataCoordinator(
+        hass, FlickAPI(auth), entry.data[CONF_SUPPLY_NODE_REF]
+    )
+
+    await coordinator.async_config_entry_first_refresh()
+
+    entry.runtime_data = coordinator

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

     return True


-async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
+async def async_unload_entry(hass: HomeAssistant, entry: FlickConfigEntry) -> bool:
     """Unload a config entry."""
-    unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
-    if unload_ok:
-        hass.data[DOMAIN].pop(entry.entry_id)
-    return unload_ok
+    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)


-class HassFlickAuth(AbstractFlickAuth):
+async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
+    """Migrate old entry."""
+    _LOGGER.debug(
+        "Migrating configuration from version %s.%s",
+        config_entry.version,
+        config_entry.minor_version,
+    )
+
+    if config_entry.version > 2:
+        return False
+
+    if config_entry.version == 1:
+        api = FlickAPI(HassFlickAuth(hass, config_entry))
+
+        accounts = await api.getCustomerAccounts()
+        active_accounts = [
+            account for account in accounts if account["status"] == "active"
+        ]
+
+        # A single active account can be auto-migrated
+        if (len(active_accounts)) == 1:
+            account = active_accounts[0]
+
+            new_data = {**config_entry.data}
+            new_data[CONF_ACCOUNT_ID] = account["id"]
+            new_data[CONF_SUPPLY_NODE_REF] = account["main_consumer"]["supply_node_ref"]
+            hass.config_entries.async_update_entry(
+                config_entry,
+                title=account["address"],
+                unique_id=account["id"],
+                data=new_data,
+                version=2,
+            )
+            return True
+
+        config_entry.async_start_reauth(hass, data={**config_entry.data})
+        return False
+
+    return True
+
+
+class HassFlickAuth(SimpleFlickAuth):
     """Implementation of AbstractFlickAuth based on a Home Assistant entity config."""

-    def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
+    def __init__(self, hass: HomeAssistant, entry: FlickConfigEntry) -> None:
         """Flick authentication based on a Home Assistant entity config."""
-        super().__init__(aiohttp_client.async_get_clientsession(hass))
+        super().__init__(
+            username=entry.data[CONF_USERNAME],
+            password=entry.data[CONF_PASSWORD],
+            client_id=entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
+            client_secret=entry.data.get(CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET),
+            websession=aiohttp_client.async_get_clientsession(hass),
+        )
         self._entry = entry
         self._hass = hass

-    async def _get_entry_token(self):
+    async def _get_entry_token(self) -> dict[str, Any]:
         # No token saved, generate one
         if (
             CONF_TOKEN_EXPIRY not in self._entry.data
@@ -75,13 +126,8 @@ class HassFlickAuth(AbstractFlickAuth):
     async def _update_token(self):
         _LOGGER.debug("Fetching new access token")

-        token = await self.get_new_token(
-            username=self._entry.data[CONF_USERNAME],
-            password=self._entry.data[CONF_PASSWORD],
-            client_id=self._entry.data.get(CONF_CLIENT_ID, DEFAULT_CLIENT_ID),
-            client_secret=self._entry.data.get(
-                CONF_CLIENT_SECRET, DEFAULT_CLIENT_SECRET
-            ),
+        token = await super().get_new_token(
+            self._username, self._password, self._client_id, self._client_secret
         )

         _LOGGER.debug("New token: %s", token)
@@ -1,14 +1,18 @@
 """Config Flow for Flick Electric integration."""

 import asyncio
+from collections.abc import Mapping
 import logging
 from typing import Any

-from pyflick.authentication import AuthException, SimpleFlickAuth
+from aiohttp import ClientResponseError
+from pyflick import FlickAPI
+from pyflick.authentication import AbstractFlickAuth, SimpleFlickAuth
 from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET
+from pyflick.types import APIException, AuthException, CustomerAccount
 import voluptuous as vol

-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
+from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
 from homeassistant.const import (
     CONF_CLIENT_ID,
     CONF_CLIENT_SECRET,
@@ -17,12 +21,18 @@ from homeassistant.const import (
 )
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import aiohttp_client
+from homeassistant.helpers.selector import (
+    SelectOptionDict,
+    SelectSelector,
+    SelectSelectorConfig,
+    SelectSelectorMode,
+)

-from .const import DOMAIN
+from .const import CONF_ACCOUNT_ID, CONF_SUPPLY_NODE_REF, DOMAIN

 _LOGGER = logging.getLogger(__name__)

-DATA_SCHEMA = vol.Schema(
+LOGIN_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_USERNAME): str,
         vol.Required(CONF_PASSWORD): str,
@@ -35,10 +45,13 @@ DATA_SCHEMA = vol.Schema(
 class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
     """Flick config flow."""

-    VERSION = 1
+    VERSION = 2
+    auth: AbstractFlickAuth
+    accounts: list[CustomerAccount]
+    data: dict[str, Any]

-    async def _validate_input(self, user_input):
-        auth = SimpleFlickAuth(
+    async def _validate_auth(self, user_input: Mapping[str, Any]) -> bool:
+        self.auth = SimpleFlickAuth(
             username=user_input[CONF_USERNAME],
             password=user_input[CONF_PASSWORD],
             websession=aiohttp_client.async_get_clientsession(self.hass),
@@ -48,22 +61,83 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN):

         try:
             async with asyncio.timeout(60):
-                token = await auth.async_get_access_token()
-        except TimeoutError as err:
+                token = await self.auth.async_get_access_token()
+        except (TimeoutError, ClientResponseError) as err:
             raise CannotConnect from err
         except AuthException as err:
             raise InvalidAuth from err

         return token is not None

+    async def async_step_select_account(
+        self, user_input: Mapping[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Ask user to select account."""
+
+        errors = {}
+        if user_input is not None and CONF_ACCOUNT_ID in user_input:
+            self.data[CONF_ACCOUNT_ID] = user_input[CONF_ACCOUNT_ID]
+            self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
+                user_input[CONF_ACCOUNT_ID]
+            )
+            try:
+                # Ensure supply node is active
+                await FlickAPI(self.auth).getPricing(self.data[CONF_SUPPLY_NODE_REF])
+            except (APIException, ClientResponseError):
+                errors["base"] = "cannot_connect"
+            except AuthException:
+                # We should never get here as we have a valid token
+                return self.async_abort(reason="no_permissions")
+            else:
+                # Supply node is active
+                return await self._async_create_entry()
+
+        try:
+            self.accounts = await FlickAPI(self.auth).getCustomerAccounts()
+        except (APIException, ClientResponseError):
+            errors["base"] = "cannot_connect"
+
+        active_accounts = [a for a in self.accounts if a["status"] == "active"]
+
+        if len(active_accounts) == 0:
+            return self.async_abort(reason="no_accounts")
+
+        if len(active_accounts) == 1:
+            self.data[CONF_ACCOUNT_ID] = active_accounts[0]["id"]
+            self.data[CONF_SUPPLY_NODE_REF] = self._get_supply_node_ref(
+                active_accounts[0]["id"]
+            )
+
+            return await self._async_create_entry()
+
+        return self.async_show_form(
+            step_id="select_account",
+            data_schema=vol.Schema(
+                {
+                    vol.Required(CONF_ACCOUNT_ID): SelectSelector(
+                        SelectSelectorConfig(
+                            options=[
+                                SelectOptionDict(
+                                    value=account["id"], label=account["address"]
+                                )
+                                for account in active_accounts
+                            ],
+                            mode=SelectSelectorMode.LIST,
+                        )
+                    )
+                }
+            ),
+            errors=errors,
+        )
+
     async def async_step_user(
-        self, user_input: dict[str, Any] | None = None
+        self, user_input: Mapping[str, Any] | None = None
     ) -> ConfigFlowResult:
         """Handle gathering login info."""
         errors = {}
         if user_input is not None:
             try:
-                await self._validate_input(user_input)
+                await self._validate_auth(user_input)
             except CannotConnect:
                 errors["base"] = "cannot_connect"
             except InvalidAuth:
@@ -72,20 +146,61 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN):
                 _LOGGER.exception("Unexpected exception")
                 errors["base"] = "unknown"
             else:
-                await self.async_set_unique_id(
-                    f"flick_electric_{user_input[CONF_USERNAME]}"
-                )
-                self._abort_if_unique_id_configured()
-
-                return self.async_create_entry(
-                    title=f"Flick Electric: {user_input[CONF_USERNAME]}",
-                    data=user_input,
-                )
+                self.data = dict(user_input)
+                return await self.async_step_select_account(user_input)

         return self.async_show_form(
-            step_id="user", data_schema=DATA_SCHEMA, errors=errors
+            step_id="user", data_schema=LOGIN_SCHEMA, errors=errors
         )

+    async def async_step_reauth(
+        self, user_input: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Handle re-authentication."""
+
+        self.data = {**user_input}
+
+        return await self.async_step_user(user_input)
+
+    async def _async_create_entry(self) -> ConfigFlowResult:
+        """Create an entry for the flow."""
+
+        await self.async_set_unique_id(self.data[CONF_ACCOUNT_ID])
+
+        account = self._get_account(self.data[CONF_ACCOUNT_ID])
+
+        if self.source == SOURCE_REAUTH:
+            # Migration completed
+            if self._get_reauth_entry().version == 1:
+                self.hass.config_entries.async_update_entry(
+                    self._get_reauth_entry(),
+                    unique_id=self.unique_id,
+                    data=self.data,
+                    version=self.VERSION,
+                )
+
+            return self.async_update_reload_and_abort(
+                self._get_reauth_entry(),
+                unique_id=self.unique_id,
+                title=account["address"],
+                data=self.data,
+            )
+
+        self._abort_if_unique_id_configured()
+
+        return self.async_create_entry(
+            title=account["address"],
+            data=self.data,
+        )
+
+    def _get_account(self, account_id: str) -> CustomerAccount:
+        """Get the account for the account ID."""
+        return next(a for a in self.accounts if a["id"] == account_id)
+
+    def _get_supply_node_ref(self, account_id: str) -> str:
+        """Get the supply node ref for the account."""
+        return self._get_account(account_id)["main_consumer"][CONF_SUPPLY_NODE_REF]
+

 class CannotConnect(HomeAssistantError):
     """Error to indicate we cannot connect."""
@@ -3,6 +3,8 @@
 DOMAIN = "flick_electric"

 CONF_TOKEN_EXPIRY = "expires"
+CONF_ACCOUNT_ID = "account_id"
+CONF_SUPPLY_NODE_REF = "supply_node_ref"

 ATTR_START_AT = "start_at"
 ATTR_END_AT = "end_at"

homeassistant/components/flick_electric/coordinator.py (new file, 47 lines)
@@ -0,0 +1,47 @@
+"""Data Coordinator for Flick Electric."""
+
+import asyncio
+from datetime import timedelta
+import logging
+
+import aiohttp
+from pyflick import FlickAPI, FlickPrice
+from pyflick.types import APIException, AuthException
+
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryAuthFailed
+from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
+
+_LOGGER = logging.getLogger(__name__)
+
+SCAN_INTERVAL = timedelta(minutes=5)
+
+type FlickConfigEntry = ConfigEntry[FlickElectricDataCoordinator]
+
+
+class FlickElectricDataCoordinator(DataUpdateCoordinator[FlickPrice]):
+    """Coordinator for flick power price."""
+
+    def __init__(
+        self, hass: HomeAssistant, api: FlickAPI, supply_node_ref: str
+    ) -> None:
+        """Initialize FlickElectricDataCoordinator."""
+        super().__init__(
+            hass,
+            _LOGGER,
+            name="Flick Electric",
+            update_interval=SCAN_INTERVAL,
+        )
+        self.supply_node_ref = supply_node_ref
+        self._api = api
+
+    async def _async_update_data(self) -> FlickPrice:
+        """Fetch pricing data from Flick Electric."""
+        try:
+            async with asyncio.timeout(60):
+                return await self._api.getPricing(self.supply_node_ref)
+        except AuthException as err:
+            raise ConfigEntryAuthFailed from err
+        except (APIException, aiohttp.ClientResponseError) as err:
+            raise UpdateFailed from err

@@ -7,5 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["pyflick"],
-  "requirements": ["PyFlick==0.0.2"]
+  "requirements": ["PyFlick==1.1.3"]
 }
Some files were not shown because too many files have changed in this diff.