Mirror of https://github.com/home-assistant/core.git (synced 2025-11-12 12:30:31 +00:00)

Compare commits: rc ... claude/tri (477 commits)
[Commit list: 477 commits, newest 0f3c3e5b0d through oldest 569dd2d6b7; the author, date, and message columns were not captured in this snapshot, so the per-commit table is omitted.]
.github/workflows/builder.yml (vendored, 50 changes)

@@ -88,6 +88,10 @@ jobs:
       fail-fast: false
       matrix:
         arch: ${{ fromJson(needs.init.outputs.architectures) }}
+        exclude:
+          - arch: armv7
+          - arch: armhf
+          - arch: i386
     steps:
       - name: Checkout the repository
         uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
@@ -162,18 +166,6 @@ jobs:
             sed -i "s|home-assistant-intents==.*||" requirements_all.txt
           fi

-      - name: Adjustments for armhf
-        if: matrix.arch == 'armhf'
-        run: |
-          # Pandas has issues building on armhf, it is expected they
-          # will drop the platform in the near future (they consider it
-          # "flimsy" on 386). The following packages depend on pandas,
-          # so we comment them out.
-          sed -i "s|env-canada|# env-canada|g" requirements_all.txt
-          sed -i "s|noaa-coops|# noaa-coops|g" requirements_all.txt
-          sed -i "s|pyezviz|# pyezviz|g" requirements_all.txt
-          sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
-
       - name: Download translations
         uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v6.0.0
         with:
@@ -226,19 +218,11 @@ jobs:
           - odroid-c4
           - odroid-m1
           - odroid-n2
-          - odroid-xu
-          - qemuarm
           - qemuarm-64
-          - qemux86
           - qemux86-64
-          - raspberrypi
-          - raspberrypi2
-          - raspberrypi3
           - raspberrypi3-64
-          - raspberrypi4
           - raspberrypi4-64
           - raspberrypi5-64
-          - tinker
           - yellow
           - green
     steps:
@@ -297,6 +281,7 @@ jobs:
          key-description: "Home Assistant Core"
          version: ${{ needs.init.outputs.version }}
          channel: ${{ needs.init.outputs.channel }}
+          exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'

      - name: Update version file (stable -> beta)
        if: needs.init.outputs.channel == 'stable'
@@ -306,6 +291,7 @@ jobs:
          key-description: "Home Assistant Core"
          version: ${{ needs.init.outputs.version }}
          channel: beta
+          exclude-list: '["odroid-xu","qemuarm","qemux86","raspberrypi","raspberrypi2","raspberrypi3","raspberrypi4","tinker"]'

  publish_container:
    name: Publish meta container for ${{ matrix.registry }}
@@ -357,27 +343,12 @@ jobs:

          docker manifest create "${registry}/home-assistant:${tag_l}" \
            "${registry}/amd64-homeassistant:${tag_r}" \
-            "${registry}/i386-homeassistant:${tag_r}" \
-            "${registry}/armhf-homeassistant:${tag_r}" \
-            "${registry}/armv7-homeassistant:${tag_r}" \
            "${registry}/aarch64-homeassistant:${tag_r}"

          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
            "${registry}/amd64-homeassistant:${tag_r}" \
            --os linux --arch amd64

-          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
-            "${registry}/i386-homeassistant:${tag_r}" \
-            --os linux --arch 386
-
-          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
-            "${registry}/armhf-homeassistant:${tag_r}" \
-            --os linux --arch arm --variant=v6
-
-          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
-            "${registry}/armv7-homeassistant:${tag_r}" \
-            --os linux --arch arm --variant=v7
-
          docker manifest annotate "${registry}/home-assistant:${tag_l}" \
            "${registry}/aarch64-homeassistant:${tag_r}" \
            --os linux --arch arm64 --variant=v8
@@ -405,23 +376,14 @@ jobs:

          # Pull images from github container registry and verify signature
          docker pull "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
-          docker pull "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
-          docker pull "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
-          docker pull "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
          docker pull "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"

          validate_image "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
-          validate_image "ghcr.io/home-assistant/i386-homeassistant:${{ needs.init.outputs.version }}"
-          validate_image "ghcr.io/home-assistant/armhf-homeassistant:${{ needs.init.outputs.version }}"
-          validate_image "ghcr.io/home-assistant/armv7-homeassistant:${{ needs.init.outputs.version }}"
          validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"

          if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
            # Upload images to dockerhub
            push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
-            push_dockerhub "i386-homeassistant" "${{ needs.init.outputs.version }}"
-            push_dockerhub "armhf-homeassistant" "${{ needs.init.outputs.version }}"
-            push_dockerhub "armv7-homeassistant" "${{ needs.init.outputs.version }}"
            push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
          fi
.github/workflows/ci.yaml (vendored, 10 changes)

@@ -40,7 +40,7 @@ env:
  CACHE_VERSION: 1
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.11"
+  HA_SHORT_VERSION: "2025.12"
  DEFAULT_PYTHON: "3.13"
  ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
  # 10.3 is the oldest supported version
@@ -502,7 +502,6 @@ jobs:
            libavfilter-dev \
            libavformat-dev \
            libavutil-dev \
-            libgammu-dev \
            libswresample-dev \
            libswscale-dev \
            libudev-dev
@@ -623,7 +622,7 @@ jobs:
    steps:
      - *checkout
      - name: Dependency review
-        uses: actions/dependency-review-action@40c09b7dc99638e5ddb0bfd91c1673effc064d8a # v4.8.1
+        uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
        with:
          license-check: false # We use our own license audit checks

@@ -801,8 +800,7 @@ jobs:
            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            bluez \
            ffmpeg \
-            libturbojpeg \
-            libgammu-dev
+            libturbojpeg
      - *checkout
      - *setup-python-default
      - *cache-restore-python-default
@@ -853,7 +851,6 @@ jobs:
            bluez \
            ffmpeg \
            libturbojpeg \
-            libgammu-dev \
            libxml2-utils
      - *checkout
      - *setup-python-matrix
@@ -1233,7 +1230,6 @@ jobs:
            bluez \
            ffmpeg \
            libturbojpeg \
-            libgammu-dev \
            libxml2-utils
      - *checkout
      - *setup-python-matrix
.github/workflows/codeql.yml (vendored, 4 changes)

@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0

      - name: Initialize CodeQL
-        uses: github/codeql-action/init@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
+        uses: github/codeql-action/init@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
        with:
          languages: python

      - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@4e94bd11f71e507f7f87df81788dff88d1dacbfb # v4.31.0
+        uses: github/codeql-action/analyze@0499de31b99561a6d14a36a5f662c2a54f91beee # v4.31.2
        with:
          category: "/language:python"
.github/workflows/wheels.yml (vendored, 2 changes)

@@ -228,7 +228,7 @@ jobs:
          arch: ${{ matrix.arch }}
          wheels-key: ${{ secrets.WHEELS_KEY }}
          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
          constraints: "homeassistant/package_constraints.txt"
          requirements-diff: "requirements_diff.txt"
.strict-typing

@@ -107,6 +107,7 @@ homeassistant.components.automation.*
 homeassistant.components.awair.*
 homeassistant.components.axis.*
 homeassistant.components.azure_storage.*
+homeassistant.components.backblaze_b2.*
 homeassistant.components.backup.*
 homeassistant.components.baf.*
 homeassistant.components.bang_olufsen.*
@@ -395,7 +396,6 @@ homeassistant.components.otbr.*
 homeassistant.components.overkiz.*
 homeassistant.components.overseerr.*
 homeassistant.components.p1_monitor.*
-homeassistant.components.pandora.*
 homeassistant.components.panel_custom.*
 homeassistant.components.paperless_ngx.*
 homeassistant.components.peblar.*
CODEOWNERS (generated, 20 changes)

@@ -196,6 +196,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/azure_service_bus/ @hfurubotten
 /homeassistant/components/azure_storage/ @zweckj
 /tests/components/azure_storage/ @zweckj
+/homeassistant/components/backblaze_b2/ @hugo-vrijswijk @ElCruncharino
+/tests/components/backblaze_b2/ @hugo-vrijswijk @ElCruncharino
 /homeassistant/components/backup/ @home-assistant/core
 /tests/components/backup/ @home-assistant/core
 /homeassistant/components/baf/ @bdraco @jfroy
@@ -316,8 +318,6 @@ build.json @home-assistant/supervisor
 /tests/components/cpuspeed/ @fabaff
 /homeassistant/components/crownstone/ @Crownstone @RicArch97
 /tests/components/crownstone/ @Crownstone @RicArch97
-/homeassistant/components/cups/ @fabaff
-/tests/components/cups/ @fabaff
 /homeassistant/components/cync/ @Kinachi249
 /tests/components/cync/ @Kinachi249
 /homeassistant/components/daikin/ @fredrike
@@ -510,8 +510,6 @@ build.json @home-assistant/supervisor
 /tests/components/fjaraskupan/ @elupus
 /homeassistant/components/flexit_bacnet/ @lellky @piotrbulinski
 /tests/components/flexit_bacnet/ @lellky @piotrbulinski
-/homeassistant/components/flick_electric/ @ZephireNZ
-/tests/components/flick_electric/ @ZephireNZ
 /homeassistant/components/flipr/ @cnico
 /tests/components/flipr/ @cnico
 /homeassistant/components/flo/ @dmulcahey
@@ -1019,8 +1017,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/msteams/ @peroyvind
 /homeassistant/components/mullvad/ @meichthys
 /tests/components/mullvad/ @meichthys
-/homeassistant/components/music_assistant/ @music-assistant
-/tests/components/music_assistant/ @music-assistant
+/homeassistant/components/music_assistant/ @music-assistant @arturpragacz
+/tests/components/music_assistant/ @music-assistant @arturpragacz
 /homeassistant/components/mutesync/ @currentoor
 /tests/components/mutesync/ @currentoor
 /homeassistant/components/my/ @home-assistant/core
@@ -1479,8 +1477,6 @@ build.json @home-assistant/supervisor
 /tests/components/smhi/ @gjohansson-ST
 /homeassistant/components/smlight/ @tl-sl
 /tests/components/smlight/ @tl-sl
-/homeassistant/components/sms/ @ocalvo
-/tests/components/sms/ @ocalvo
 /homeassistant/components/snapcast/ @luar123
 /tests/components/snapcast/ @luar123
 /homeassistant/components/snmp/ @nmaggioni
@@ -1721,8 +1717,8 @@ build.json @home-assistant/supervisor
 /tests/components/vallox/ @andre-richter @slovdahl @viiru- @yozik04
 /homeassistant/components/valve/ @home-assistant/core
 /tests/components/valve/ @home-assistant/core
-/homeassistant/components/vegehub/ @ghowevege
-/tests/components/vegehub/ @ghowevege
+/homeassistant/components/vegehub/ @thulrus
+/tests/components/vegehub/ @thulrus
 /homeassistant/components/velbus/ @Cereal2nd @brefra
 /tests/components/velbus/ @Cereal2nd @brefra
 /homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
@@ -1821,8 +1817,8 @@ build.json @home-assistant/supervisor
 /tests/components/ws66i/ @ssaenger
 /homeassistant/components/wyoming/ @synesthesiam
 /tests/components/wyoming/ @synesthesiam
-/homeassistant/components/xbox/ @hunterjm
-/tests/components/xbox/ @hunterjm
+/homeassistant/components/xbox/ @hunterjm @tr4nt0r
+/tests/components/xbox/ @hunterjm @tr4nt0r
 /homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
 /tests/components/xiaomi_aqara/ @danielhiversen @syssi
 /homeassistant/components/xiaomi_ble/ @Jc2k @Ernst79
Dockerfile.dev

@@ -13,7 +13,6 @@ RUN \
        libavcodec-dev \
        libavdevice-dev \
        libavutil-dev \
-        libgammu-dev \
        libswscale-dev \
        libswresample-dev \
        libavfilter-dev \
homeassistant/auth/mfa_modules/notify.py

@@ -6,7 +6,6 @@ Sending HOTP through notify service
 from __future__ import annotations

-import asyncio
 from collections import OrderedDict
 import logging
 from typing import Any, cast

@@ -304,14 +303,15 @@ class NotifySetupFlow(SetupFlow[NotifyAuthModule]):
         if not self._available_notify_services:
             return self.async_abort(reason="no_available_service")

-        schema: dict[str, Any] = OrderedDict()
-        schema["notify_service"] = vol.In(self._available_notify_services)
-        schema["target"] = vol.Optional(str)
-
-        return self.async_show_form(
-            step_id="init", data_schema=vol.Schema(schema), errors=errors
-        )
+        schema = vol.Schema(
+            {
+                vol.Required("notify_service"): vol.In(self._available_notify_services),
+                vol.Optional("target"): str,
+            }
+        )
+
+        return self.async_show_form(step_id="init", data_schema=schema, errors=errors)

     async def async_step_setup(
         self, user_input: dict[str, str] | None = None
     ) -> FlowResult:
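The rewrite above moves requiredness markers into the schema keys, which is what lets voluptuous enforce them; the old OrderedDict build misused vol.Optional as a value. A minimal standalone sketch of the resulting behavior (the service names here are illustrative, not from the module):

import voluptuous as vol

schema = vol.Schema(
    {
        vol.Required("notify_service"): vol.In(["notify.sms", "notify.email"]),
        vol.Optional("target"): str,
    }
)

# Valid input passes through unchanged.
print(schema({"notify_service": "notify.sms"}))

# A missing required key raises vol.MultipleInvalid.
try:
    schema({"target": "+15551234"})
except vol.MultipleInvalid as err:
    print(f"rejected: {err}")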
homeassistant/brands/yale.json

@@ -1,11 +1,5 @@
 {
   "domain": "yale",
-  "name": "Yale",
-  "integrations": [
-    "august",
-    "yale_smart_alarm",
-    "yalexs_ble",
-    "yale_home",
-    "yale"
-  ]
+  "name": "Yale (non-US/Canada)",
+  "integrations": ["yale", "yalexs_ble", "yale_smart_alarm"]
 }
homeassistant/brands/yale_august.json (new file, 5 lines)

@@ -0,0 +1,5 @@
+{
+  "domain": "yale_august",
+  "name": "Yale August (US/Canada)",
+  "integrations": ["august", "august_ble"]
+}
homeassistant/components/adax/config_flow.py

@@ -17,6 +17,11 @@ from homeassistant.const import (
     CONF_UNIQUE_ID,
 )
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.selector import (
+    TextSelector,
+    TextSelectorConfig,
+    TextSelectorType,
+)

 from .const import (
     ACCOUNT_ID,
@@ -66,7 +71,15 @@ class AdaxConfigFlow(ConfigFlow, domain=DOMAIN):
     ) -> ConfigFlowResult:
         """Handle the local step."""
         data_schema = vol.Schema(
-            {vol.Required(WIFI_SSID): str, vol.Required(WIFI_PSWD): str}
+            {
+                vol.Required(WIFI_SSID): str,
+                vol.Required(WIFI_PSWD): TextSelector(
+                    TextSelectorConfig(
+                        type=TextSelectorType.PASSWORD,
+                        autocomplete="current-password",
+                    ),
+                ),
+            }
         )
         if user_input is None:
             return self.async_show_form(
homeassistant/components/adax/sensor.py

@@ -2,14 +2,16 @@

 from __future__ import annotations

+from dataclasses import dataclass
 from typing import cast

 from homeassistant.components.sensor import (
     SensorDeviceClass,
     SensorEntity,
+    SensorEntityDescription,
     SensorStateClass,
 )
-from homeassistant.const import UnitOfEnergy
+from homeassistant.const import UnitOfEnergy, UnitOfTemperature
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.device_registry import DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -20,44 +22,74 @@ from .const import CONNECTION_TYPE, DOMAIN, LOCAL
 from .coordinator import AdaxCloudCoordinator


+@dataclass(kw_only=True, frozen=True)
+class AdaxSensorDescription(SensorEntityDescription):
+    """Describes Adax sensor entity."""
+
+    data_key: str
+
+
+SENSORS: tuple[AdaxSensorDescription, ...] = (
+    AdaxSensorDescription(
+        key="temperature",
+        data_key="temperature",
+        device_class=SensorDeviceClass.TEMPERATURE,
+        native_unit_of_measurement=UnitOfTemperature.CELSIUS,
+        state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=1,
+    ),
+    AdaxSensorDescription(
+        key="energy",
+        data_key="energyWh",
+        device_class=SensorDeviceClass.ENERGY,
+        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
+        suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
+        state_class=SensorStateClass.TOTAL_INCREASING,
+        suggested_display_precision=3,
+    ),
+)
+
+
 async def async_setup_entry(
     hass: HomeAssistant,
     entry: AdaxConfigEntry,
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
-    """Set up the Adax energy sensors with config flow."""
+    """Set up the Adax sensors with config flow."""
     if entry.data.get(CONNECTION_TYPE) != LOCAL:
         cloud_coordinator = cast(AdaxCloudCoordinator, entry.runtime_data)

-        # Create individual energy sensors for each device
         async_add_entities(
-            AdaxEnergySensor(cloud_coordinator, device_id)
-            for device_id in cloud_coordinator.data
+            [
+                AdaxSensor(cloud_coordinator, entity_description, device_id)
+                for device_id in cloud_coordinator.data
+                for entity_description in SENSORS
+            ]
         )


-class AdaxEnergySensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
-    """Representation of an Adax energy sensor."""
+class AdaxSensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
+    """Representation of an Adax sensor."""

+    entity_description: AdaxSensorDescription
     _attr_has_entity_name = True
-    _attr_translation_key = "energy"
-    _attr_device_class = SensorDeviceClass.ENERGY
-    _attr_native_unit_of_measurement = UnitOfEnergy.WATT_HOUR
-    _attr_suggested_unit_of_measurement = UnitOfEnergy.KILO_WATT_HOUR
-    _attr_state_class = SensorStateClass.TOTAL_INCREASING
-    _attr_suggested_display_precision = 3

     def __init__(
         self,
         coordinator: AdaxCloudCoordinator,
+        entity_description: AdaxSensorDescription,
         device_id: str,
     ) -> None:
-        """Initialize the energy sensor."""
+        """Initialize the sensor."""
         super().__init__(coordinator)
+        self.entity_description = entity_description
         self._device_id = device_id
         room = coordinator.data[device_id]

-        self._attr_unique_id = f"{room['homeId']}_{device_id}_energy"
+        self._attr_unique_id = (
+            f"{room['homeId']}_{device_id}_{self.entity_description.key}"
+        )
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, device_id)},
             name=room["name"],
@@ -68,10 +100,14 @@ class AdaxEnergySensor(CoordinatorEntity[AdaxCloudCoordinator], SensorEntity):
     def available(self) -> bool:
         """Return True if entity is available."""
         return (
-            super().available and "energyWh" in self.coordinator.data[self._device_id]
+            super().available
+            and self.entity_description.data_key
+            in self.coordinator.data[self._device_id]
         )

     @property
-    def native_value(self) -> int:
+    def native_value(self) -> int | float | None:
         """Return the native value of the sensor."""
-        return int(self.coordinator.data[self._device_id]["energyWh"])
+        return self.coordinator.data[self._device_id].get(
+            self.entity_description.data_key
+        )
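The refactor above hinges on subclassing a frozen, keyword-only dataclass: the base description is frozen, so the subclass must be frozen too, and kw_only is what allows the required data_key field to follow inherited fields that have defaults. A standalone sketch of just that mechanic (these classes are illustrative, not Home Assistant's):

from dataclasses import dataclass


@dataclass(frozen=True, kw_only=True)
class Description:
    """Base description with a defaulted field."""

    key: str
    icon: str | None = None


# kw_only=True lets a required field come after inherited defaulted
# fields; frozen=True must match the frozen base class, otherwise
# Python raises TypeError at class definition time.
@dataclass(frozen=True, kw_only=True)
class DataKeyDescription(Description):
    data_key: str


d = DataKeyDescription(key="energy", data_key="energyWh")
print(d.data_key)  # energyWh
# d.key = "other" would raise dataclasses.FrozenInstanceError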
homeassistant/components/ai_task/services.yaml

@@ -30,6 +30,7 @@ generate_data:
    media:
      accept:
        - "*"
+      multiple: true
generate_image:
  fields:
    task_name:
@@ -57,3 +58,4 @@ generate_image:
    media:
      accept:
        - "*"
+      multiple: true
homeassistant/components/alarm_control_panel/strings.json

@@ -143,5 +143,28 @@
       "name": "Trigger"
     }
   },
-  "title": "Alarm control panel"
+  "title": "Alarm control panel",
+  "triggers": {
+    "armed": {
+      "description": "Triggers when an alarm is armed.",
+      "description_configured": "Triggers when an alarm is armed",
+      "fields": {
+        "mode": {
+          "description": "The arm modes to trigger on. If empty, triggers on all arm modes.",
+          "name": "Arm modes"
+        }
+      },
+      "name": "When an alarm is armed"
+    },
+    "disarmed": {
+      "description": "Triggers when an alarm is disarmed.",
+      "description_configured": "Triggers when an alarm is disarmed",
+      "name": "When an alarm is disarmed"
+    },
+    "triggered": {
+      "description": "Triggers when an alarm is triggered.",
+      "description_configured": "Triggers when an alarm is triggered",
+      "name": "When an alarm is triggered"
+    }
+  }
 }
homeassistant/components/alarm_control_panel/trigger.py (new file, 283 lines)

@@ -0,0 +1,283 @@
+"""Provides triggers for alarm control panels."""
+
+from typing import TYPE_CHECKING, cast, override
+
+import voluptuous as vol
+
+from homeassistant.const import (
+    ATTR_ENTITY_ID,
+    CONF_OPTIONS,
+    CONF_TARGET,
+    STATE_UNAVAILABLE,
+)
+from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
+from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.target import (
+    TargetStateChangedData,
+    async_track_target_selector_state_change_event,
+)
+from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
+from homeassistant.helpers.typing import ConfigType
+
+from .const import DOMAIN, AlarmControlPanelState
+
+CONF_MODE = "mode"
+
+ARMED_TRIGGER_SCHEMA = vol.Schema(
+    {
+        vol.Optional(CONF_OPTIONS, default={}): {
+            vol.Optional(CONF_MODE, default=[]): vol.All(
+                cv.ensure_list,
+                [
+                    vol.In(
+                        [
+                            AlarmControlPanelState.ARMED_HOME,
+                            AlarmControlPanelState.ARMED_AWAY,
+                            AlarmControlPanelState.ARMED_NIGHT,
+                            AlarmControlPanelState.ARMED_VACATION,
+                            AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
+                        ]
+                    )
+                ],
+            ),
+        },
+        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
+    }
+)
+
+DISARMED_TRIGGER_SCHEMA = vol.Schema(
+    {
+        vol.Optional(CONF_OPTIONS, default={}): {},
+        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
+    }
+)
+
+TRIGGERED_TRIGGER_SCHEMA = vol.Schema(
+    {
+        vol.Optional(CONF_OPTIONS, default={}): {},
+        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
+    }
+)
+
+
+class AlarmArmedTrigger(Trigger):
+    """Trigger for when an alarm control panel is armed."""
+
+    @override
+    @classmethod
+    async def async_validate_config(
+        cls, hass: HomeAssistant, config: ConfigType
+    ) -> ConfigType:
+        """Validate config."""
+        return cast(ConfigType, ARMED_TRIGGER_SCHEMA(config))
+
+    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
+        """Initialize the alarm armed trigger."""
+        super().__init__(hass, config)
+        if TYPE_CHECKING:
+            assert config.options is not None
+            assert config.target is not None
+        self._options = config.options
+        self._target = config.target
+
+    @override
+    async def async_attach_runner(
+        self, run_action: TriggerActionRunner
+    ) -> CALLBACK_TYPE:
+        """Attach the trigger to an action runner."""
+        mode_filter = self._options[CONF_MODE]
+
+        # All armed states
+        armed_states = {
+            AlarmControlPanelState.ARMED_HOME,
+            AlarmControlPanelState.ARMED_AWAY,
+            AlarmControlPanelState.ARMED_NIGHT,
+            AlarmControlPanelState.ARMED_VACATION,
+            AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
+        }
+
+        @callback
+        def state_change_listener(
+            target_state_change_data: TargetStateChangedData,
+        ) -> None:
+            """Listen for state changes and call action."""
+            event = target_state_change_data.state_change_event
+            entity_id = event.data["entity_id"]
+            from_state = event.data["old_state"]
+            to_state = event.data["new_state"]
+
+            # Ignore unavailable states
+            if to_state is None or to_state.state == STATE_UNAVAILABLE:
+                return
+
+            # Check if the new state is an armed state
+            if to_state.state not in armed_states:
+                return
+
+            # If mode filter is specified, check if the mode matches
+            if mode_filter and to_state.state not in mode_filter:
+                return
+
+            run_action(
+                {
+                    ATTR_ENTITY_ID: entity_id,
+                    "from_state": from_state,
+                    "to_state": to_state,
+                },
+                f"alarm armed on {entity_id}",
+                event.context,
+            )
+
+        def entity_filter(entities: set[str]) -> set[str]:
+            """Filter entities of this domain."""
+            return {
+                entity_id
+                for entity_id in entities
+                if split_entity_id(entity_id)[0] == DOMAIN
+            }
+
+        return async_track_target_selector_state_change_event(
+            self._hass, self._target, state_change_listener, entity_filter
+        )
+
+
+class AlarmDisarmedTrigger(Trigger):
+    """Trigger for when an alarm control panel is disarmed."""
+
+    @override
+    @classmethod
+    async def async_validate_config(
+        cls, hass: HomeAssistant, config: ConfigType
+    ) -> ConfigType:
+        """Validate config."""
+        return cast(ConfigType, DISARMED_TRIGGER_SCHEMA(config))
+
+    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
+        """Initialize the alarm disarmed trigger."""
+        super().__init__(hass, config)
+        if TYPE_CHECKING:
+            assert config.target is not None
+        self._target = config.target
+
+    @override
+    async def async_attach_runner(
+        self, run_action: TriggerActionRunner
+    ) -> CALLBACK_TYPE:
+        """Attach the trigger to an action runner."""
+
+        @callback
+        def state_change_listener(
+            target_state_change_data: TargetStateChangedData,
+        ) -> None:
+            """Listen for state changes and call action."""
+            event = target_state_change_data.state_change_event
+            entity_id = event.data["entity_id"]
+            from_state = event.data["old_state"]
+            to_state = event.data["new_state"]
+
+            # Ignore unavailable states
+            if to_state is None or to_state.state == STATE_UNAVAILABLE:
+                return
+
+            # Check if the new state is disarmed
+            if to_state.state != AlarmControlPanelState.DISARMED:
+                return
+
+            run_action(
+                {
+                    ATTR_ENTITY_ID: entity_id,
+                    "from_state": from_state,
+                    "to_state": to_state,
+                },
+                f"alarm disarmed on {entity_id}",
+                event.context,
+            )
+
+        def entity_filter(entities: set[str]) -> set[str]:
+            """Filter entities of this domain."""
+            return {
+                entity_id
+                for entity_id in entities
+                if split_entity_id(entity_id)[0] == DOMAIN
+            }
+
+        return async_track_target_selector_state_change_event(
+            self._hass, self._target, state_change_listener, entity_filter
+        )
+
+
+class AlarmTriggeredTrigger(Trigger):
+    """Trigger for when an alarm control panel is triggered."""
+
+    @override
+    @classmethod
+    async def async_validate_config(
+        cls, hass: HomeAssistant, config: ConfigType
+    ) -> ConfigType:
+        """Validate config."""
+        return cast(ConfigType, TRIGGERED_TRIGGER_SCHEMA(config))
+
+    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
+        """Initialize the alarm triggered trigger."""
+        super().__init__(hass, config)
+        if TYPE_CHECKING:
+            assert config.target is not None
+        self._target = config.target
+
+    @override
+    async def async_attach_runner(
+        self, run_action: TriggerActionRunner
+    ) -> CALLBACK_TYPE:
+        """Attach the trigger to an action runner."""
+
+        @callback
+        def state_change_listener(
+            target_state_change_data: TargetStateChangedData,
+        ) -> None:
+            """Listen for state changes and call action."""
+            event = target_state_change_data.state_change_event
+            entity_id = event.data["entity_id"]
+            from_state = event.data["old_state"]
+            to_state = event.data["new_state"]
+
+            # Ignore unavailable states
+            if to_state is None or to_state.state == STATE_UNAVAILABLE:
+                return
+
+            # Check if the new state is triggered
+            if to_state.state != AlarmControlPanelState.TRIGGERED:
+                return
+
+            run_action(
+                {
+                    ATTR_ENTITY_ID: entity_id,
+                    "from_state": from_state,
+                    "to_state": to_state,
+                },
+                f"alarm triggered on {entity_id}",
+                event.context,
+            )
+
+        def entity_filter(entities: set[str]) -> set[str]:
+            """Filter entities of this domain."""
+            return {
+                entity_id
+                for entity_id in entities
+                if split_entity_id(entity_id)[0] == DOMAIN
+            }
+
+        return async_track_target_selector_state_change_event(
+            self._hass, self._target, state_change_listener, entity_filter
+        )
+
+
+TRIGGERS: dict[str, type[Trigger]] = {
+    "armed": AlarmArmedTrigger,
+    "disarmed": AlarmDisarmedTrigger,
+    "triggered": AlarmTriggeredTrigger,
+}
+
+
+async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
+    """Return the triggers for alarm control panels."""
+    return TRIGGERS
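The armed trigger's listener reduces to a simple predicate: fire only if the new state is one of the armed states and, when a mode filter is configured, is contained in it; an empty filter matches every armed mode. A standalone sketch of that logic, detached from the Home Assistant event machinery (state strings are illustrative):

ARMED_STATES = {
    "armed_home",
    "armed_away",
    "armed_night",
    "armed_vacation",
    "armed_custom_bypass",
}


def should_fire(new_state: str | None, mode_filter: list[str]) -> bool:
    """Mirror the armed trigger's listener checks."""
    if new_state is None or new_state == "unavailable":
        return False  # ignore unavailable transitions
    if new_state not in ARMED_STATES:
        return False  # only armed states qualify
    # An empty filter means "any armed mode".
    return not mode_filter or new_state in mode_filter


assert should_fire("armed_home", [])
assert should_fire("armed_away", ["armed_away"])
assert not should_fire("armed_away", ["armed_home"])
assert not should_fire("disarmed", [])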
homeassistant/components/alarm_control_panel/triggers.yaml (new file, 30 lines)

@@ -0,0 +1,30 @@
+armed:
+  target:
+    entity:
+      domain: alarm_control_panel
+  fields:
+    mode:
+      required: false
+      default: []
+      selector:
+        select:
+          multiple: true
+          options:
+            - value: armed_home
+              label: Home
+            - value: armed_away
+              label: Away
+            - value: armed_night
+              label: Night
+            - value: armed_vacation
+              label: Vacation
+            - value: armed_custom_bypass
+              label: Custom bypass
+disarmed:
+  target:
+    entity:
+      domain: alarm_control_panel
+triggered:
+  target:
+    entity:
+      domain: alarm_control_panel
homeassistant/components/alexa/handlers.py

@@ -58,7 +58,10 @@ from homeassistant.const import (
 from homeassistant.helpers import network
 from homeassistant.util import color as color_util, dt as dt_util
 from homeassistant.util.decorator import Registry
-from homeassistant.util.unit_conversion import TemperatureConverter
+from homeassistant.util.unit_conversion import (
+    TemperatureConverter,
+    TemperatureDeltaConverter,
+)

 from .config import AbstractConfig
 from .const import (
@@ -844,7 +847,7 @@ def temperature_from_object(
         temp -= 273.15

     if interval:
-        return TemperatureConverter.convert_interval(temp, from_unit, to_unit)
+        return TemperatureDeltaConverter.convert(temp, from_unit, to_unit)
     return TemperatureConverter.convert(temp, from_unit, to_unit)
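The distinction this change encodes: an absolute temperature conversion applies both the scale factor and the offset, while an interval (delta) conversion applies the scale factor only. Getting them mixed up silently skews "raise the setpoint by N degrees" requests. A minimal sketch of the arithmetic (plain functions, not the converter classes from the diff):

def c_to_f_absolute(celsius: float) -> float:
    """Convert an absolute temperature reading."""
    return celsius * 9 / 5 + 32


def c_to_f_delta(celsius_delta: float) -> float:
    """Convert a temperature difference: scale only, no 32-degree offset."""
    return celsius_delta * 9 / 5


print(c_to_f_absolute(10))  # 50.0, "set the thermostat to 10 C"
print(c_to_f_delta(10))     # 18.0, "raise the setpoint by 10 C"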
homeassistant/components/alexa_devices/diagnostics.py

@@ -2,6 +2,7 @@

 from __future__ import annotations

+from dataclasses import asdict
 from typing import Any

 from aioamazondevices.structures import AmazonDevice
@@ -60,5 +61,5 @@ def build_device_data(device: AmazonDevice) -> dict[str, Any]:
         "online": device.online,
         "serial number": device.serial_number,
         "software version": device.software_version,
-        "sensors": device.sensors,
+        "sensors": {key: asdict(sensor) for key, sensor in device.sensors.items()},
     }
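The motivation for the asdict change: diagnostics payloads must be JSON-serializable, and dataclass instances are not, but dataclasses.asdict recursively converts them into plain dicts. A self-contained sketch (the Sensor class here is a stand-in, not the aioamazondevices type):

from dataclasses import asdict, dataclass
import json


@dataclass
class Sensor:
    name: str
    value: float
    scale: str | None = None


sensors = {"temperature": Sensor("temperature", 21.5, "CELSIUS")}

# json.dumps(sensors) would raise TypeError: Sensor is not serializable.
payload = {key: asdict(sensor) for key, sensor in sensors.items()}
print(json.dumps(payload))
# {"temperature": {"name": "temperature", "value": 21.5, "scale": "CELSIUS"}}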
homeassistant/components/amberelectric/__init__.py

@@ -9,14 +9,14 @@ from homeassistant.helpers import config_validation as cv

 from .const import CONF_SITE_ID, DOMAIN, PLATFORMS
 from .coordinator import AmberConfigEntry, AmberUpdateCoordinator
-from .services import setup_services
+from .services import async_setup_services

 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)


 async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     """Set up the Amber component."""
-    setup_services(hass)
+    async_setup_services(hass)
     return True


homeassistant/components/amberelectric/services.py

@@ -10,6 +10,7 @@ from homeassistant.core import (
     ServiceCall,
     ServiceResponse,
     SupportsResponse,
+    callback,
 )
 from homeassistant.exceptions import ServiceValidationError
 from homeassistant.helpers.selector import ConfigEntrySelector
@@ -102,7 +103,8 @@ def get_forecasts(channel_type: str, data: dict) -> list[JsonValueType]:
     return results


-def setup_services(hass: HomeAssistant) -> None:
+@callback
+def async_setup_services(hass: HomeAssistant) -> None:
    """Set up the services for the Amber integration."""

    async def handle_get_forecasts(call: ServiceCall) -> ServiceResponse:
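The rename plus @callback follows Home Assistant's convention: the async_ prefix and the decorator both signal that the function is safe to call directly from the event loop because it does no blocking I/O. A hedged sketch of the overall registration pattern; the domain, service name, and handler are placeholders, and this only runs inside a Home Assistant instance:

from homeassistant.core import (
    HomeAssistant,
    ServiceCall,
    ServiceResponse,
    SupportsResponse,
    callback,
)


@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register services synchronously from inside the event loop.

    The @callback decorator tells Home Assistant this function does no
    blocking work, so no executor-thread hop is needed to invoke it.
    """

    async def handle_example(call: ServiceCall) -> ServiceResponse:
        # Placeholder handler; a real one would query the integration.
        return {"echo": dict(call.data)}

    hass.services.async_register(
        "example_domain",
        "example_service",
        handle_example,
        supports_response=SupportsResponse.ONLY,
    )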
homeassistant/components/androidtv/config_flow.py

@@ -39,11 +39,11 @@ from .const import (
     CONF_TURN_OFF_COMMAND,
     CONF_TURN_ON_COMMAND,
     DEFAULT_ADB_SERVER_PORT,
-    DEFAULT_DEVICE_CLASS,
     DEFAULT_EXCLUDE_UNNAMED_APPS,
     DEFAULT_GET_SOURCES,
     DEFAULT_PORT,
     DEFAULT_SCREENCAP_INTERVAL,
+    DEVICE_AUTO,
     DEVICE_CLASSES,
     DOMAIN,
     PROP_ETHMAC,
@@ -89,8 +89,14 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
         data_schema = vol.Schema(
             {
                 vol.Required(CONF_HOST, default=host): str,
-                vol.Required(CONF_DEVICE_CLASS, default=DEFAULT_DEVICE_CLASS): vol.In(
-                    DEVICE_CLASSES
+                vol.Required(CONF_DEVICE_CLASS, default=DEVICE_AUTO): SelectSelector(
+                    SelectSelectorConfig(
+                        options=[
+                            SelectOptionDict(value=k, label=v)
+                            for k, v in DEVICE_CLASSES.items()
+                        ],
+                        translation_key="device_class",
+                    )
                 ),
                 vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
             },

homeassistant/components/androidtv/const.py

@@ -15,15 +15,19 @@ CONF_TURN_OFF_COMMAND = "turn_off_command"
 CONF_TURN_ON_COMMAND = "turn_on_command"

 DEFAULT_ADB_SERVER_PORT = 5037
-DEFAULT_DEVICE_CLASS = "auto"
 DEFAULT_EXCLUDE_UNNAMED_APPS = False
 DEFAULT_GET_SOURCES = True
 DEFAULT_PORT = 5555
 DEFAULT_SCREENCAP_INTERVAL = 5

+DEVICE_AUTO = "auto"
 DEVICE_ANDROIDTV = "androidtv"
 DEVICE_FIRETV = "firetv"
-DEVICE_CLASSES = [DEFAULT_DEVICE_CLASS, DEVICE_ANDROIDTV, DEVICE_FIRETV]
+DEVICE_CLASSES = {
+    DEVICE_AUTO: "auto",
+    DEVICE_ANDROIDTV: "Android TV",
+    DEVICE_FIRETV: "Fire TV",
+}

 PROP_ETHMAC = "ethmac"
 PROP_SERIALNO = "serialno"

homeassistant/components/androidtv/strings.json

@@ -65,6 +65,13 @@
       }
     }
   },
+  "selector": {
+    "device_class": {
+      "options": {
+        "auto": "Auto-detect device type"
+      }
+    }
+  },
   "services": {
     "adb_command": {
       "description": "Sends an ADB command to an Android / Fire TV device.",
homeassistant/components/anthropic/__init__.py

@@ -25,7 +25,7 @@ from .const import (
     RECOMMENDED_CHAT_MODEL,
 )

-PLATFORMS = (Platform.CONVERSATION,)
+PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

 type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]
homeassistant/components/anthropic/ai_task.py (new file, 80 lines)

@@ -0,0 +1,80 @@
+"""AI Task integration for Anthropic."""
+
+from __future__ import annotations
+
+from json import JSONDecodeError
+import logging
+
+from homeassistant.components import ai_task, conversation
+from homeassistant.config_entries import ConfigEntry
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+from homeassistant.util.json import json_loads
+
+from .entity import AnthropicBaseLLMEntity
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: ConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up AI Task entities."""
+    for subentry in config_entry.subentries.values():
+        if subentry.subentry_type != "ai_task_data":
+            continue
+
+        async_add_entities(
+            [AnthropicTaskEntity(config_entry, subentry)],
+            config_subentry_id=subentry.subentry_id,
+        )
+
+
+class AnthropicTaskEntity(
+    ai_task.AITaskEntity,
+    AnthropicBaseLLMEntity,
+):
+    """Anthropic AI Task entity."""
+
+    _attr_supported_features = (
+        ai_task.AITaskEntityFeature.GENERATE_DATA
+        | ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
+    )
+
+    async def _async_generate_data(
+        self,
+        task: ai_task.GenDataTask,
+        chat_log: conversation.ChatLog,
+    ) -> ai_task.GenDataTaskResult:
+        """Handle a generate data task."""
+        await self._async_handle_chat_log(chat_log, task.name, task.structure)
+
+        if not isinstance(chat_log.content[-1], conversation.AssistantContent):
+            raise HomeAssistantError(
+                "Last content in chat log is not an AssistantContent"
+            )
+
+        text = chat_log.content[-1].content or ""
+
+        if not task.structure:
+            return ai_task.GenDataTaskResult(
+                conversation_id=chat_log.conversation_id,
+                data=text,
+            )
+        try:
+            data = json_loads(text)
+        except JSONDecodeError as err:
+            _LOGGER.error(
+                "Failed to parse JSON response: %s. Response: %s",
+                err,
+                text,
+            )
+            raise HomeAssistantError("Error with Claude structured response") from err
+
+        return ai_task.GenDataTaskResult(
+            conversation_id=chat_log.conversation_id,
+            data=data,
+        )
@@ -2,11 +2,10 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from functools import partial
|
||||
import json
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
from typing import Any
|
||||
|
||||
import anthropic
|
||||
import voluptuous as vol
|
||||
@@ -38,6 +37,7 @@ from homeassistant.helpers.selector import (
|
||||
SelectSelectorConfig,
|
||||
TemplateSelector,
|
||||
)
|
||||
from homeassistant.helpers.typing import VolDictType
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
@@ -53,8 +53,10 @@ from .const import (
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DEFAULT_AI_TASK_NAME,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DOMAIN,
|
||||
NON_THINKING_MODELS,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
@@ -73,12 +75,16 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
RECOMMENDED_OPTIONS = {
|
||||
RECOMMENDED_CONVERSATION_OPTIONS = {
|
||||
CONF_RECOMMENDED: True,
|
||||
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
|
||||
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
|
||||
}
|
||||
|
||||
RECOMMENDED_AI_TASK_OPTIONS = {
|
||||
CONF_RECOMMENDED: True,
|
||||
}
|
||||
|
||||
|
||||
async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
|
||||
"""Validate the user input allows us to connect.
|
||||
@@ -101,7 +107,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors = {}
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match(user_input)
|
||||
@@ -129,10 +135,16 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
subentries=[
|
||||
{
|
||||
"subentry_type": "conversation",
|
||||
"data": RECOMMENDED_OPTIONS,
|
||||
"data": RECOMMENDED_CONVERSATION_OPTIONS,
|
||||
"title": DEFAULT_CONVERSATION_NAME,
|
||||
"unique_id": None,
|
||||
}
|
||||
},
|
||||
{
|
||||
"subentry_type": "ai_task_data",
|
||||
"data": RECOMMENDED_AI_TASK_OPTIONS,
|
||||
"title": DEFAULT_AI_TASK_NAME,
|
||||
"unique_id": None,
|
||||
},
|
||||
],
|
||||
)
|
||||
|
||||
@@ -146,101 +158,240 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
cls, config_entry: ConfigEntry
|
||||
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||
"""Return subentries supported by this integration."""
|
||||
return {"conversation": ConversationSubentryFlowHandler}
|
||||
return {
|
||||
"conversation": ConversationSubentryFlowHandler,
|
||||
"ai_task_data": ConversationSubentryFlowHandler,
|
||||
}
|
||||
|
||||
|
||||
class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
"""Flow for managing conversation subentries."""
|
||||
|
||||
last_rendered_recommended = False
|
||||
options: dict[str, Any]
|
||||
|
||||
@property
|
||||
def _is_new(self) -> bool:
|
||||
"""Return if this is a new subentry."""
|
||||
return self.source == "user"
|
||||
|
||||
async def async_step_set_options(
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> SubentryFlowResult:
|
||||
"""Set conversation options."""
|
||||
"""Add a subentry."""
|
||||
if self._subentry_type == "ai_task_data":
|
||||
self.options = RECOMMENDED_AI_TASK_OPTIONS.copy()
|
||||
else:
|
||||
self.options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
|
||||
return await self.async_step_init()
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> SubentryFlowResult:
|
||||
"""Handle reconfiguration of a subentry."""
|
||||
self.options = self._get_reconfigure_subentry().data.copy()
|
||||
return await self.async_step_init()
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> SubentryFlowResult:
|
||||
"""Set initial options."""
|
||||
# abort if entry is not loaded
|
||||
if self._get_entry().state != ConfigEntryState.LOADED:
|
||||
return self.async_abort(reason="entry_not_loaded")
|
||||
|
||||
hass_apis: list[SelectOptionDict] = [
|
||||
SelectOptionDict(
|
||||
label=api.name,
|
||||
value=api.id,
|
||||
)
|
||||
for api in llm.async_get_apis(self.hass)
|
||||
]
|
||||
if (suggested_llm_apis := self.options.get(CONF_LLM_HASS_API)) and isinstance(
|
||||
suggested_llm_apis, str
|
||||
):
|
||||
self.options[CONF_LLM_HASS_API] = [suggested_llm_apis]
|
||||
|
||||
step_schema: VolDictType = {}
|
||||
errors: dict[str, str] = {}
|
||||
|
-       if user_input is None:
-           if self._is_new:
-               options = RECOMMENDED_OPTIONS.copy()
+       if self._is_new:
+           if self._subentry_type == "ai_task_data":
+               default_name = DEFAULT_AI_TASK_NAME
            else:
-               # If this is a reconfiguration, we need to copy the existing options
-               # so that we can show the current values in the form.
-               options = self._get_reconfigure_subentry().data.copy()
+               default_name = DEFAULT_CONVERSATION_NAME
+           step_schema[vol.Required(CONF_NAME, default=default_name)] = str

-           self.last_rendered_recommended = cast(
-               bool, options.get(CONF_RECOMMENDED, False)
+       if self._subentry_type == "conversation":
            step_schema.update(
                {
                    vol.Optional(CONF_PROMPT): TemplateSelector(),
                    vol.Optional(
                        CONF_LLM_HASS_API,
                    ): SelectSelector(
                        SelectSelectorConfig(options=hass_apis, multiple=True)
                    ),
                }
            )

-       elif user_input[CONF_RECOMMENDED] == self.last_rendered_recommended:
+       step_schema[
            vol.Required(
                CONF_RECOMMENDED, default=self.options.get(CONF_RECOMMENDED, False)
            )
        ] = bool

        if user_input is not None:
            if not user_input.get(CONF_LLM_HASS_API):
                user_input.pop(CONF_LLM_HASS_API, None)
-           if user_input.get(
-               CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET
-           ) >= user_input.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS):
-               errors[CONF_THINKING_BUDGET] = "thinking_budget_too_large"
-           if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH):
-               model = user_input.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
-               if model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
-                   errors[CONF_WEB_SEARCH] = "web_search_unsupported_model"
-           elif user_input.get(
+           if user_input[CONF_RECOMMENDED]:
                if not errors:
                    if self._is_new:
                        return self.async_create_entry(
                            title=user_input.pop(CONF_NAME),
                            data=user_input,
                        )
                    return self.async_update_and_abort(
                        self._get_entry(),
                        self._get_reconfigure_subentry(),
                        data=user_input,
                    )
            else:
                self.options.update(user_input)
                if (
                    CONF_LLM_HASS_API in self.options
                    and CONF_LLM_HASS_API not in user_input
                ):
                    self.options.pop(CONF_LLM_HASS_API)
                if not errors:
                    return await self.async_step_advanced()

        return self.async_show_form(
            step_id="init",
            data_schema=self.add_suggested_values_to_schema(
                vol.Schema(step_schema), self.options
            ),
            errors=errors or None,
        )
    async def async_step_advanced(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """Manage advanced options."""
        errors: dict[str, str] = {}

        step_schema: VolDictType = {
            vol.Optional(
                CONF_CHAT_MODEL,
                default=RECOMMENDED_CHAT_MODEL,
            ): str,
            vol.Optional(
                CONF_MAX_TOKENS,
                default=RECOMMENDED_MAX_TOKENS,
            ): int,
            vol.Optional(
                CONF_TEMPERATURE,
                default=RECOMMENDED_TEMPERATURE,
            ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
        }

        if user_input is not None:
            self.options.update(user_input)

            if not errors:
                return await self.async_step_model()

        return self.async_show_form(
            step_id="advanced",
            data_schema=self.add_suggested_values_to_schema(
                vol.Schema(step_schema), self.options
            ),
            errors=errors,
        )
    async def async_step_model(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """Manage model-specific options."""
        errors: dict[str, str] = {}

        step_schema: VolDictType = {}

        model = self.options[CONF_CHAT_MODEL]

        if not model.startswith(tuple(NON_THINKING_MODELS)):
            step_schema[
                vol.Optional(CONF_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET)
            ] = vol.All(
                NumberSelector(
                    NumberSelectorConfig(
                        min=0,
                        max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
                    )
                ),
                vol.Coerce(int),
            )
        else:
            self.options.pop(CONF_THINKING_BUDGET, None)

        if not model.startswith(tuple(WEB_SEARCH_UNSUPPORTED_MODELS)):
            step_schema.update(
                {
                    vol.Optional(
                        CONF_WEB_SEARCH,
                        default=RECOMMENDED_WEB_SEARCH,
                    ): bool,
                    vol.Optional(
                        CONF_WEB_SEARCH_MAX_USES,
                        default=RECOMMENDED_WEB_SEARCH_MAX_USES,
                    ): int,
                    vol.Optional(
                        CONF_WEB_SEARCH_USER_LOCATION,
                        default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
                    ): bool,
                }
            )
        else:
            self.options.pop(CONF_WEB_SEARCH, None)
            self.options.pop(CONF_WEB_SEARCH_MAX_USES, None)
            self.options.pop(CONF_WEB_SEARCH_USER_LOCATION, None)

            self.options.pop(CONF_WEB_SEARCH_CITY, None)
            self.options.pop(CONF_WEB_SEARCH_REGION, None)
            self.options.pop(CONF_WEB_SEARCH_COUNTRY, None)
            self.options.pop(CONF_WEB_SEARCH_TIMEZONE, None)

        if not step_schema:
            user_input = {}

        if user_input is not None:
            if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH) and not errors:
                if user_input.get(
                    CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
                ):
                    user_input.update(await self._get_location_data())

            self.options.update(user_input)

            if not errors:
                if self._is_new:
                    return self.async_create_entry(
-                       title=user_input.pop(CONF_NAME),
-                       data=user_input,
+                       title=self.options.pop(CONF_NAME),
+                       data=self.options,
                    )

                return self.async_update_and_abort(
                    self._get_entry(),
                    self._get_reconfigure_subentry(),
-                   data=user_input,
+                   data=self.options,
                )

-           options = user_input
-           self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
-       else:
-           # Re-render the options again, now with the recommended options shown/hidden
-           self.last_rendered_recommended = user_input[CONF_RECOMMENDED]
-
-           options = {
-               CONF_RECOMMENDED: user_input[CONF_RECOMMENDED],
-               CONF_PROMPT: user_input[CONF_PROMPT],
-               CONF_LLM_HASS_API: user_input.get(CONF_LLM_HASS_API),
-           }
-
-       suggested_values = options.copy()
-       if not suggested_values.get(CONF_PROMPT):
-           suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT
-       if (
-           suggested_llm_apis := suggested_values.get(CONF_LLM_HASS_API)
-       ) and isinstance(suggested_llm_apis, str):
-           suggested_values[CONF_LLM_HASS_API] = [suggested_llm_apis]
-
-       schema = self.add_suggested_values_to_schema(
-           vol.Schema(
-               anthropic_config_option_schema(self.hass, self._is_new, options)
-           ),
-           suggested_values,
-       )

        return self.async_show_form(
-           step_id="set_options",
-           data_schema=schema,
+           step_id="model",
+           data_schema=self.add_suggested_values_to_schema(
+               vol.Schema(step_schema), self.options
+           ),
            errors=errors or None,
            last_step=True,
        )
    async def _get_location_data(self) -> dict[str, str]:
@@ -304,77 +455,3 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
        _LOGGER.debug("Location data: %s", location_data)

        return location_data
-
-    async_step_user = async_step_set_options
-    async_step_reconfigure = async_step_set_options
-
-
-def anthropic_config_option_schema(
-    hass: HomeAssistant,
-    is_new: bool,
-    options: Mapping[str, Any],
-) -> dict:
-    """Return a schema for Anthropic completion options."""
-    hass_apis: list[SelectOptionDict] = [
-        SelectOptionDict(
-            label=api.name,
-            value=api.id,
-        )
-        for api in llm.async_get_apis(hass)
-    ]
-
-    if is_new:
-        schema: dict[vol.Required | vol.Optional, Any] = {
-            vol.Required(CONF_NAME, default=DEFAULT_CONVERSATION_NAME): str,
-        }
-    else:
-        schema = {}
-
-    schema.update(
-        {
-            vol.Optional(CONF_PROMPT): TemplateSelector(),
-            vol.Optional(
-                CONF_LLM_HASS_API,
-            ): SelectSelector(SelectSelectorConfig(options=hass_apis, multiple=True)),
-            vol.Required(
-                CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False)
-            ): bool,
-        }
-    )
-
-    if options.get(CONF_RECOMMENDED):
-        return schema
-
-    schema.update(
-        {
-            vol.Optional(
-                CONF_CHAT_MODEL,
-                default=RECOMMENDED_CHAT_MODEL,
-            ): str,
-            vol.Optional(
-                CONF_MAX_TOKENS,
-                default=RECOMMENDED_MAX_TOKENS,
-            ): int,
-            vol.Optional(
-                CONF_TEMPERATURE,
-                default=RECOMMENDED_TEMPERATURE,
-            ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
-            vol.Optional(
-                CONF_THINKING_BUDGET,
-                default=RECOMMENDED_THINKING_BUDGET,
-            ): int,
-            vol.Optional(
-                CONF_WEB_SEARCH,
-                default=RECOMMENDED_WEB_SEARCH,
-            ): bool,
-            vol.Optional(
-                CONF_WEB_SEARCH_MAX_USES,
-                default=RECOMMENDED_WEB_SEARCH_MAX_USES,
-            ): int,
-            vol.Optional(
-                CONF_WEB_SEARCH_USER_LOCATION,
-                default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
-            ): bool,
-        }
-    )
-    return schema
@@ -6,6 +6,7 @@ DOMAIN = "anthropic"
 LOGGER = logging.getLogger(__package__)

 DEFAULT_CONVERSATION_NAME = "Claude conversation"
+DEFAULT_AI_TASK_NAME = "Claude AI Task"

 CONF_RECOMMENDED = "recommended"
 CONF_PROMPT = "prompt"
@@ -1,17 +1,24 @@
 """Base entity for Anthropic."""

+import base64
 from collections.abc import AsyncGenerator, Callable, Iterable
 from dataclasses import dataclass, field
 import json
+from mimetypes import guess_file_type
+from pathlib import Path
 from typing import Any

 import anthropic
 from anthropic import AsyncStream
 from anthropic.types import (
+    Base64ImageSourceParam,
+    Base64PDFSourceParam,
     CitationsDelta,
     CitationsWebSearchResultLocation,
     CitationWebSearchResultLocationParam,
     ContentBlockParam,
+    DocumentBlockParam,
+    ImageBlockParam,
     InputJSONDelta,
     MessageDeltaUsage,
     MessageParam,
@@ -37,6 +44,9 @@ from anthropic.types import (
     ThinkingConfigDisabledParam,
     ThinkingConfigEnabledParam,
     ThinkingDelta,
+    ToolChoiceAnyParam,
+    ToolChoiceAutoParam,
+    ToolChoiceToolParam,
     ToolParam,
     ToolResultBlockParam,
     ToolUnionParam,
@@ -50,13 +60,16 @@ from anthropic.types import (
     WebSearchToolResultError,
 )
 from anthropic.types.message_create_params import MessageCreateParamsStreaming
 import voluptuous as vol
 from voluptuous_openapi import convert

 from homeassistant.components import conversation
 from homeassistant.config_entries import ConfigSubentry
+from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import device_registry as dr, llm
 from homeassistant.helpers.entity import Entity
+from homeassistant.util import slugify

 from . import AnthropicConfigEntry
 from .const import (
@@ -321,6 +334,7 @@ def _convert_content(
 async def _transform_stream(  # noqa: C901 - This is complex, but better to have it in one place
     chat_log: conversation.ChatLog,
     stream: AsyncStream[MessageStreamEvent],
+    output_tool: str | None = None,
 ) -> AsyncGenerator[
     conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
 ]:
@@ -381,6 +395,16 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                     input="",
                 )
                 current_tool_args = ""
+                if response.content_block.name == output_tool:
+                    if first_block or content_details.has_content():
+                        if content_details.has_citations():
+                            content_details.delete_empty()
+                            yield {"native": content_details}
+                        content_details = ContentDetails()
+                        content_details.add_citation_detail()
+                        yield {"role": "assistant"}
+                        has_native = False
+                    first_block = False
             elif isinstance(response.content_block, TextBlock):
                 if (  # Do not start a new assistant content just for citations, concatenate consecutive blocks with citations instead.
                     first_block
@@ -471,7 +495,16 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
             first_block = True
         elif isinstance(response, RawContentBlockDeltaEvent):
             if isinstance(response.delta, InputJSONDelta):
-                current_tool_args += response.delta.partial_json
+                if (
+                    current_tool_block is not None
+                    and current_tool_block["name"] == output_tool
+                ):
+                    content_details.citation_details[-1].length += len(
+                        response.delta.partial_json
+                    )
+                    yield {"content": response.delta.partial_json}
+                else:
+                    current_tool_args += response.delta.partial_json
             elif isinstance(response.delta, TextDelta):
                 content_details.citation_details[-1].length += len(response.delta.text)
                 yield {"content": response.delta.text}
@@ -490,6 +523,9 @@ async def _transform_stream(  # noqa: C901 - This is complex, but better to have
                 content_details.add_citation(response.delta.citation)
         elif isinstance(response, RawContentBlockStopEvent):
             if current_tool_block is not None:
+                if current_tool_block["name"] == output_tool:
+                    current_tool_block = None
+                    continue
                 tool_args = json.loads(current_tool_args) if current_tool_args else {}
                 current_tool_block["input"] = tool_args
                 yield {
@@ -557,6 +593,8 @@ class AnthropicBaseLLMEntity(Entity):
     async def _async_handle_chat_log(
         self,
         chat_log: conversation.ChatLog,
+        structure_name: str | None = None,
+        structure: vol.Schema | None = None,
     ) -> None:
         """Generate an answer for the chat log."""
         options = self.subentry.data
@@ -613,6 +651,74 @@ class AnthropicBaseLLMEntity(Entity):
             }
             tools.append(web_search)

+        # Handle attachments by adding them to the last user message
+        last_content = chat_log.content[-1]
+        if last_content.role == "user" and last_content.attachments:
+            last_message = messages[-1]
+            if last_message["role"] != "user":
+                raise HomeAssistantError(
+                    "Last message must be a user message to add attachments"
+                )
+            if isinstance(last_message["content"], str):
+                last_message["content"] = [
+                    TextBlockParam(type="text", text=last_message["content"])
+                ]
+            last_message["content"].extend(  # type: ignore[union-attr]
+                await async_prepare_files_for_prompt(
+                    self.hass, [(a.path, a.mime_type) for a in last_content.attachments]
+                )
+            )
+
+        if structure and structure_name:
+            structure_name = slugify(structure_name)
+            if model_args["thinking"]["type"] == "disabled":
+                if not tools:
+                    # Simplest case: no tools and no extended thinking
+                    # Add a tool and force its use
+                    model_args["tool_choice"] = ToolChoiceToolParam(
+                        type="tool",
+                        name=structure_name,
+                    )
+                else:
+                    # Second case: tools present but no extended thinking
+                    # Allow the model to use any tool but not text response
+                    # The model should know to use the right tool by its description
+                    model_args["tool_choice"] = ToolChoiceAnyParam(
+                        type="any",
+                    )
+            else:
+                # Extended thinking is enabled. With extended thinking, we cannot
+                # force tool use or disable text responses, so we add a hint to the
+                # system prompt instead. With extended thinking, the model should be
+                # smart enough to use the tool.
+                model_args["tool_choice"] = ToolChoiceAutoParam(
+                    type="auto",
+                )
+
+                if isinstance(model_args["system"], str):
+                    model_args["system"] = [
+                        TextBlockParam(type="text", text=model_args["system"])
+                    ]
+                model_args["system"].append(  # type: ignore[union-attr]
+                    TextBlockParam(
+                        type="text",
+                        text=f"Claude MUST use the '{structure_name}' tool to provide the final answer instead of plain text.",
+                    )
+                )
+
+            tools.append(
+                ToolParam(
+                    name=structure_name,
+                    description="Use this tool to reply to the user",
+                    input_schema=convert(
+                        structure,
+                        custom_serializer=chat_log.llm_api.custom_serializer
+                        if chat_log.llm_api
+                        else llm.selector_serializer,
+                    ),
+                )
+            )
+
         if tools:
             model_args["tools"] = tools

@@ -629,7 +735,11 @@ class AnthropicBaseLLMEntity(Entity):
                     content
                     async for content in chat_log.async_add_delta_content_stream(
                         self.entity_id,
-                        _transform_stream(chat_log, stream),
+                        _transform_stream(
+                            chat_log,
+                            stream,
+                            output_tool=structure_name if structure else None,
+                        ),
                     )
                 ]
             )
@@ -641,3 +751,59 @@ class AnthropicBaseLLMEntity(Entity):

         if not chat_log.unresponded_tool_results:
             break

async def async_prepare_files_for_prompt(
    hass: HomeAssistant, files: list[tuple[Path, str | None]]
) -> Iterable[ImageBlockParam | DocumentBlockParam]:
    """Append files to a prompt.

    Caller needs to ensure that the files are allowed.
    """

    def append_files_to_content() -> Iterable[ImageBlockParam | DocumentBlockParam]:
        content: list[ImageBlockParam | DocumentBlockParam] = []

        for file_path, mime_type in files:
            if not file_path.exists():
                raise HomeAssistantError(f"`{file_path}` does not exist")

            if mime_type is None:
                mime_type = guess_file_type(file_path)[0]

            if not mime_type or not mime_type.startswith(("image/", "application/pdf")):
                raise HomeAssistantError(
                    "Only images and PDF are supported by the Anthropic API, "
                    f"`{file_path}` is not an image file or PDF"
                )
            if mime_type == "image/jpg":
                mime_type = "image/jpeg"

            base64_file = base64.b64encode(file_path.read_bytes()).decode("utf-8")

            if mime_type.startswith("image/"):
                content.append(
                    ImageBlockParam(
                        type="image",
                        source=Base64ImageSourceParam(
                            type="base64",
                            media_type=mime_type,  # type: ignore[typeddict-item]
                            data=base64_file,
                        ),
                    )
                )
            elif mime_type.startswith("application/pdf"):
                content.append(
                    DocumentBlockParam(
                        type="document",
                        source=Base64PDFSourceParam(
                            type="base64",
                            media_type=mime_type,  # type: ignore[typeddict-item]
                            data=base64_file,
                        ),
                    )
                )

        return content

    return await hass.async_add_executor_job(append_files_to_content)
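A minimal usage sketch of the helper above, assuming hypothetical attachment paths (callers must first ensure the files are allowed, as the docstring requires):

    # Hypothetical example; the paths and MIME types are illustrative only.
    async def attach_examples(hass: HomeAssistant) -> list[ImageBlockParam | DocumentBlockParam]:
        return list(
            await async_prepare_files_for_prompt(
                hass,
                [
                    (Path("/media/snapshot.jpg"), "image/jpeg"),
                    (Path("/media/manual.pdf"), None),  # MIME type guessed from the file
                ],
            )
        )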
@@ -18,43 +18,94 @@
      }
    },
    "config_subentries": {
      "ai_task_data": {
        "abort": {
          "entry_not_loaded": "[%key:component::anthropic::config_subentries::conversation::abort::entry_not_loaded%]",
          "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
        },
        "entry_type": "AI task",
        "initiate_flow": {
          "reconfigure": "Reconfigure AI task",
          "user": "Add AI task"
        },
        "step": {
          "advanced": {
            "data": {
              "chat_model": "[%key:common::generic::model%]",
              "max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
              "temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
            },
            "title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
          },
          "init": {
            "data": {
              "name": "[%key:common::config_flow::data::name%]",
              "recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data::recommended%]"
            },
            "title": "[%key:component::anthropic::config_subentries::conversation::step::init::title%]"
          },
          "model": {
            "data": {
              "thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data::thinking_budget%]",
              "user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data::user_location%]",
              "web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search%]",
              "web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data::web_search_max_uses%]"
            },
            "data_description": {
              "thinking_budget": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::thinking_budget%]",
              "user_location": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::user_location%]",
              "web_search": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search%]",
              "web_search_max_uses": "[%key:component::anthropic::config_subentries::conversation::step::model::data_description::web_search_max_uses%]"
            },
            "title": "[%key:component::anthropic::config_subentries::conversation::step::model::title%]"
          }
        }
      },
      "conversation": {
        "abort": {
          "entry_not_loaded": "Cannot add things while the configuration is disabled.",
          "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
        },
        "entry_type": "Conversation agent",

        "error": {
          "thinking_budget_too_large": "Maximum tokens must be greater than the thinking budget.",
          "web_search_unsupported_model": "Web search is not supported by the selected model. Please choose a compatible model or disable web search."
        },
        "initiate_flow": {
          "reconfigure": "Reconfigure conversation agent",
          "user": "Add conversation agent"
        },
        "step": {
-         "set_options": {
+         "advanced": {
            "data": {
              "chat_model": "[%key:common::generic::model%]",
              "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
              "max_tokens": "Maximum tokens to return in response",
              "temperature": "Temperature"
            },
            "title": "Advanced settings"
          },
          "init": {
            "data": {
              "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]",
              "name": "[%key:common::config_flow::data::name%]",
              "prompt": "[%key:common::config_flow::data::prompt%]",
-             "recommended": "Recommended model settings",
-             "temperature": "Temperature",
+             "recommended": "Recommended model settings"
            },
            "data_description": {
              "prompt": "Instruct how the LLM should respond. This can be a template."
            },
            "title": "Basic settings"
          },
          "model": {
            "data": {
              "thinking_budget": "Thinking budget",
              "user_location": "Include home location",
              "web_search": "Enable web search",
              "web_search_max_uses": "Maximum web searches"
            },
            "data_description": {
-             "prompt": "Instruct how the LLM should respond. This can be a template.",
              "thinking_budget": "The number of tokens the model can use to think about the response out of the total maximum number of tokens. Set to 1024 or greater to enable extended thinking.",
              "user_location": "Localize search results based on home location",
              "web_search": "The web search tool gives Claude direct access to real-time web content, allowing it to answer questions with up-to-date information beyond its knowledge cutoff",
              "web_search_max_uses": "Limit the number of searches performed per response"
            },
            "title": "Model-specific options"
          }
        }
      }
    }
@@ -98,5 +98,27 @@
          "name": "Start conversation"
        }
      },
-     "title": "Assist satellite"
+     "title": "Assist satellite",
+     "triggers": {
+       "listening": {
+         "description": "Triggers when a satellite starts listening for a command.",
+         "description_configured": "Triggers when a satellite starts listening for a command",
+         "name": "When a satellite starts listening"
+       },
+       "processing": {
+         "description": "Triggers when a satellite starts processing a command.",
+         "description_configured": "Triggers when a satellite starts processing a command",
+         "name": "When a satellite starts processing"
+       },
+       "responding": {
+         "description": "Triggers when a satellite starts responding to a command.",
+         "description_configured": "Triggers when a satellite starts responding to a command",
+         "name": "When a satellite starts responding"
+       },
+       "idle": {
+         "description": "Triggers when a satellite goes back to idle.",
+         "description_configured": "Triggers when a satellite goes back to idle",
+         "name": "When a satellite goes back to idle"
+       }
+     }
    }
homeassistant/components/assist_satellite/trigger.py (new file, 140 lines)
@@ -0,0 +1,140 @@
"""Provides triggers for assist satellites."""

from typing import TYPE_CHECKING, cast, override

import voluptuous as vol

from homeassistant.const import (
    ATTR_ENTITY_ID,
    CONF_TARGET,
    STATE_UNAVAILABLE,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import process_state_match
from homeassistant.helpers.target import (
    TargetStateChangedData,
    async_track_target_selector_state_change_event,
)
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN

STATE_TRIGGER_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
    }
)


class StateTriggerBase(Trigger):
    """Trigger for assist satellite state changes."""

    @override
    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, STATE_TRIGGER_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig, state: str) -> None:
        """Initialize the state trigger."""
        super().__init__(hass, config)
        if TYPE_CHECKING:
            assert config.target is not None
        self._target = config.target
        self._state = state

    @override
    async def async_attach_runner(
        self, run_action: TriggerActionRunner
    ) -> CALLBACK_TYPE:
        """Attach the trigger to an action runner."""
        match_config_state = process_state_match(self._state)

        @callback
        def state_change_listener(
            target_state_change_data: TargetStateChangedData,
        ) -> None:
            """Listen for state changes and call action."""
            event = target_state_change_data.state_change_event
            entity_id = event.data["entity_id"]
            from_state = event.data["old_state"]
            to_state = event.data["new_state"]

            # Ignore unavailable states
            if to_state is None or to_state.state == STATE_UNAVAILABLE:
                return

            # Check if the new state matches the trigger state
            if not match_config_state(to_state.state):
                return

            run_action(
                {
                    ATTR_ENTITY_ID: entity_id,
                    "from_state": from_state,
                    "to_state": to_state,
                },
                f"{entity_id} {self._state}",
                event.context,
            )

        def entity_filter(entities: set[str]) -> set[str]:
            """Filter entities of this domain."""
            return {
                entity_id
                for entity_id in entities
                if split_entity_id(entity_id)[0] == DOMAIN
            }

        return async_track_target_selector_state_change_event(
            self._hass, self._target, state_change_listener, entity_filter
        )


class ListeningTrigger(StateTriggerBase):
    """Trigger for when a satellite starts listening."""

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the listening trigger."""
        super().__init__(hass, config, "listening")


class ProcessingTrigger(StateTriggerBase):
    """Trigger for when a satellite starts processing."""

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the processing trigger."""
        super().__init__(hass, config, "processing")


class RespondingTrigger(StateTriggerBase):
    """Trigger for when a satellite starts responding."""

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the responding trigger."""
        super().__init__(hass, config, "responding")


class IdleTrigger(StateTriggerBase):
    """Trigger for when a satellite goes back to idle."""

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the idle trigger."""
        super().__init__(hass, config, "idle")


TRIGGERS: dict[str, type[Trigger]] = {
    "listening": ListeningTrigger,
    "processing": ProcessingTrigger,
    "responding": RespondingTrigger,
    "idle": IdleTrigger,
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for assist satellites."""
    return TRIGGERS
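A rough sketch of how this registry is consumed; the target dict shape below is an assumption, and the real wiring lives in homeassistant.helpers.trigger:

    # Hypothetical lookup/validation flow for the "listening" trigger.
    async def resolve_listening_trigger(hass: HomeAssistant) -> type[Trigger]:
        trigger_cls = (await async_get_triggers(hass))["listening"]  # ListeningTrigger
        # Raises vol.Invalid when the target selector is missing or malformed.
        await trigger_cls.async_validate_config(
            hass, {"target": {"entity_id": "assist_satellite.kitchen"}}
        )
        return trigger_cls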
homeassistant/components/assist_satellite/triggers.yaml (new file, 19 lines)
@@ -0,0 +1,19 @@
listening:
  target:
    entity:
      domain: assist_satellite

processing:
  target:
    entity:
      domain: assist_satellite

responding:
  target:
    entity:
      domain: assist_satellite

idle:
  target:
    entity:
      domain: assist_satellite
@@ -14,10 +14,11 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import EVENT_HOMEASSISTANT_STOP
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
-from homeassistant.helpers import (
-    config_entry_oauth2_flow,
-    device_registry as dr,
-    issue_registry as ir,
-)
+from homeassistant.helpers import device_registry as dr, issue_registry as ir
+from homeassistant.helpers.config_entry_oauth2_flow import (
+    ImplementationUnavailableError,
+    OAuth2Session,
+    async_get_config_entry_implementation,
+)

 from .const import DEFAULT_AUGUST_BRAND, DOMAIN, PLATFORMS
@@ -37,14 +38,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bo

     session = async_create_august_clientsession(hass)
     try:
-        implementation = (
-            await config_entry_oauth2_flow.async_get_config_entry_implementation(
-                hass, entry
-            )
-        )
-    except ValueError as err:
+        implementation = await async_get_config_entry_implementation(hass, entry)
+    except ImplementationUnavailableError as err:
         raise ConfigEntryNotReady("OAuth implementation not available") from err
-    oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation)
+    oauth_session = OAuth2Session(hass, entry, implementation)
     august_gateway = AugustGateway(Path(hass.config.config_dir), session, oauth_session)
     try:
         await async_setup_august(hass, entry, august_gateway)

@@ -6,5 +6,5 @@
   "iot_class": "local_polling",
   "loggers": ["avea"],
   "quality_scale": "legacy",
-  "requirements": ["avea==1.5.1"]
+  "requirements": ["avea==1.6.1"]
 }
homeassistant/components/backblaze_b2/__init__.py (new file, 116 lines)
@@ -0,0 +1,116 @@
"""The Backblaze B2 integration."""

from __future__ import annotations

from datetime import timedelta
import logging
from typing import Any

from b2sdk.v2 import B2Api, Bucket, InMemoryAccountInfo, exception

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.event import async_track_time_interval

from .const import (
    BACKBLAZE_REALM,
    CONF_APPLICATION_KEY,
    CONF_BUCKET,
    CONF_KEY_ID,
    DATA_BACKUP_AGENT_LISTENERS,
    DOMAIN,
)
from .repairs import (
    async_check_for_repair_issues,
    create_bucket_access_restricted_issue,
    create_bucket_not_found_issue,
)

_LOGGER = logging.getLogger(__name__)

type BackblazeConfigEntry = ConfigEntry[Bucket]


async def async_setup_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) -> bool:
    """Set up Backblaze B2 from a config entry."""

    info = InMemoryAccountInfo()
    b2_api = B2Api(info)

    def _authorize_and_get_bucket_sync() -> Bucket:
        """Synchronously authorize the Backblaze B2 account and retrieve the bucket.

        This function runs in the event loop's executor as b2sdk operations are blocking.
        """
        b2_api.authorize_account(
            BACKBLAZE_REALM,
            entry.data[CONF_KEY_ID],
            entry.data[CONF_APPLICATION_KEY],
        )
        return b2_api.get_bucket_by_name(entry.data[CONF_BUCKET])

    try:
        bucket = await hass.async_add_executor_job(_authorize_and_get_bucket_sync)
    except exception.Unauthorized as err:
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN,
            translation_key="invalid_credentials",
        ) from err
    except exception.RestrictedBucket as err:
        create_bucket_access_restricted_issue(hass, entry, err.bucket_name)
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="restricted_bucket",
            translation_placeholders={
                "restricted_bucket_name": err.bucket_name,
            },
        ) from err
    except exception.NonExistentBucket as err:
        create_bucket_not_found_issue(hass, entry, entry.data[CONF_BUCKET])
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="invalid_bucket_name",
        ) from err
    except exception.ConnectionReset as err:
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="cannot_connect",
        ) from err
    except exception.MissingAccountData as err:
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN,
            translation_key="invalid_auth",
        ) from err

    entry.runtime_data = bucket

    def _async_notify_backup_listeners() -> None:
        """Notify any registered backup agent listeners."""
        _LOGGER.debug("Notifying backup listeners for entry %s", entry.entry_id)
        for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
            listener()

    entry.async_on_unload(entry.async_on_state_change(_async_notify_backup_listeners))

    async def _periodic_issue_check(_now: Any) -> None:
        """Periodically check for repair issues."""
        await async_check_for_repair_issues(hass, entry)

    entry.async_on_unload(
        async_track_time_interval(hass, _periodic_issue_check, timedelta(minutes=30))
    )

    hass.async_create_task(async_check_for_repair_issues(hass, entry))

    return True


async def async_unload_entry(hass: HomeAssistant, entry: BackblazeConfigEntry) -> bool:
    """Unload a Backblaze B2 config entry.

    Any resources directly managed by this entry that need explicit shutdown
    would be handled here. In this case, the `async_on_state_change` listener
    handles the notification logic on unload.
    """
    return True
homeassistant/components/backblaze_b2/backup.py (new file, 615 lines)
@@ -0,0 +1,615 @@
"""Backup platform for the Backblaze B2 integration."""

import asyncio
from collections.abc import AsyncIterator, Callable, Coroutine
import functools
import json
import logging
import mimetypes
from time import time
from typing import Any

from b2sdk.v2 import FileVersion
from b2sdk.v2.exception import B2Error

from homeassistant.components.backup import (
    AgentBackup,
    BackupAgent,
    BackupAgentError,
    BackupNotFound,
    suggested_filename,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.util.async_iterator import AsyncIteratorReader

from . import BackblazeConfigEntry
from .const import (
    CONF_PREFIX,
    DATA_BACKUP_AGENT_LISTENERS,
    DOMAIN,
    METADATA_FILE_SUFFIX,
    METADATA_VERSION,
)

_LOGGER = logging.getLogger(__name__)

# Cache TTL for backup list (in seconds)
CACHE_TTL = 300


def suggested_filenames(backup: AgentBackup) -> tuple[str, str]:
    """Return the suggested filenames for the backup and metadata files."""
    base_name = suggested_filename(backup).rsplit(".", 1)[0]
    return f"{base_name}.tar", f"{base_name}.metadata.json"
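For illustration, assuming suggested_filename() yields a name such as "Backup_2025.1.0.tar" (the exact format is owned by the backup integration, not this module):

    # suggested_filenames(backup)
    # -> ("Backup_2025.1.0.tar", "Backup_2025.1.0.metadata.json")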


def _parse_metadata(raw_content: str) -> dict[str, Any]:
    """Parse metadata content from JSON."""
    try:
        data = json.loads(raw_content)
    except json.JSONDecodeError as err:
        raise ValueError(f"Invalid JSON format: {err}") from err
    else:
        if not isinstance(data, dict):
            raise TypeError("JSON content is not a dictionary")
        return data
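An example of the accepted input, mirroring the document that async_upload_backup writes further down (the values are made up):

    # _parse_metadata('{"metadata_version": 1, "backup_id": "abc123", "backup_metadata": {...}}')
    # -> {"metadata_version": 1, "backup_id": "abc123", "backup_metadata": {...}}
    # Malformed JSON raises ValueError; valid JSON that is not an object raises TypeError.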


def _find_backup_file_for_metadata(
    metadata_filename: str, all_files: dict[str, FileVersion], prefix: str
) -> FileVersion | None:
    """Find corresponding backup file for metadata file."""
    base_name = metadata_filename[len(prefix) :].removesuffix(METADATA_FILE_SUFFIX)
    return next(
        (
            file
            for name, file in all_files.items()
            if name.startswith(prefix + base_name)
            and name.endswith(".tar")
            and name != metadata_filename
        ),
        None,
    )
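A sketch of the matching rule with hypothetical names and the prefix "ha/":

    # metadata file "ha/Backup_2025.1.0.metadata.json" -> base name "Backup_2025.1.0"
    # matches "ha/Backup_2025.1.0.tar"
    # skipped: anything not ending in ".tar", and the metadata file itself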


def _create_backup_from_metadata(
    metadata_content: dict[str, Any], backup_file: FileVersion
) -> AgentBackup:
    """Construct an AgentBackup from parsed metadata content and the associated backup file."""
    metadata = metadata_content["backup_metadata"]
    metadata["size"] = backup_file.size
    return AgentBackup.from_dict(metadata)


def handle_b2_errors[T](
    func: Callable[..., Coroutine[Any, Any, T]],
) -> Callable[..., Coroutine[Any, Any, T]]:
    """Handle B2Errors by converting them to BackupAgentError."""

    @functools.wraps(func)
    async def wrapper(*args: Any, **kwargs: Any) -> T:
        """Catch B2Error and raise BackupAgentError."""
        try:
            return await func(*args, **kwargs)
        except B2Error as err:
            error_msg = f"Failed during {func.__name__}"
            raise BackupAgentError(error_msg) from err

    return wrapper
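Usage sketch: the decorator is applied to the agent coroutines below so b2sdk failures surface as backup errors; the method here is hypothetical:

    # @handle_b2_errors
    # async def async_example(self) -> None:
    #     await self._hass.async_add_executor_job(self._bucket.get_file_info_by_name, "x")
    #     # A B2Error here re-raises as BackupAgentError("Failed during async_example")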


async def async_get_backup_agents(
    hass: HomeAssistant,
) -> list[BackupAgent]:
    """Return a list of backup agents for all configured Backblaze B2 entries."""
    entries: list[BackblazeConfigEntry] = hass.config_entries.async_loaded_entries(
        DOMAIN
    )
    return [BackblazeBackupAgent(hass, entry) for entry in entries]


@callback
def async_register_backup_agents_listener(
    hass: HomeAssistant,
    *,
    listener: Callable[[], None],
    **kwargs: Any,
) -> Callable[[], None]:
    """Register a listener to be called when backup agents are added or removed.

    :return: A function to unregister the listener.
    """
    hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)

    @callback
    def remove_listener() -> None:
        """Remove the listener."""
        hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
        if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
            hass.data.pop(DATA_BACKUP_AGENT_LISTENERS, None)

    return remove_listener
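A sketch of the expected call pattern from the backup integration's side (the listener body is hypothetical):

    # def _agents_changed() -> None:
    #     ...  # e.g. rebuild the agent list via async_get_backup_agents()
    #
    # unsub = async_register_backup_agents_listener(hass, listener=_agents_changed)
    # ...
    # unsub()  # removes the listener; drops the hass.data key once the list is empty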


class BackblazeBackupAgent(BackupAgent):
    """Backup agent for Backblaze B2 cloud storage."""

    domain = DOMAIN

    def __init__(self, hass: HomeAssistant, entry: BackblazeConfigEntry) -> None:
        """Initialize the Backblaze B2 agent."""
        super().__init__()
        self._hass = hass
        self._bucket = entry.runtime_data
        self._prefix = entry.data[CONF_PREFIX]

        self.name = entry.title
        self.unique_id = entry.entry_id

        self._all_files_cache: dict[str, FileVersion] = {}
        self._all_files_cache_expiration: float = 0.0
        self._backup_list_cache: dict[str, AgentBackup] = {}
        self._backup_list_cache_expiration: float = 0.0

        self._all_files_cache_lock = asyncio.Lock()
        self._backup_list_cache_lock = asyncio.Lock()

    def _is_cache_valid(self, expiration_time: float) -> bool:
        """Check if cache is still valid based on expiration time."""
        return time() <= expiration_time

    async def _cleanup_failed_upload(self, filename: str) -> None:
        """Clean up a partially uploaded file after upload failure."""
        _LOGGER.warning(
            "Attempting to delete partially uploaded main backup file %s "
            "due to metadata upload failure",
            filename,
        )
        try:
            uploaded_main_file_info = await self._hass.async_add_executor_job(
                self._bucket.get_file_info_by_name, filename
            )
            await self._hass.async_add_executor_job(uploaded_main_file_info.delete)
        except B2Error:
            _LOGGER.debug(
                "Failed to clean up partially uploaded main backup file %s. "
                "Manual intervention may be required to delete it from Backblaze B2",
                filename,
                exc_info=True,
            )
        else:
            _LOGGER.debug(
                "Successfully deleted partially uploaded main backup file %s", filename
            )

    async def _get_file_for_download(self, backup_id: str) -> FileVersion:
        """Get backup file for download, raising if not found."""
        file, _ = await self._find_file_and_metadata_version_by_id(backup_id)
        if not file:
            raise BackupNotFound(f"Backup {backup_id} not found")
        return file

    @handle_b2_errors
    async def async_download_backup(
        self, backup_id: str, **kwargs: Any
    ) -> AsyncIterator[bytes]:
        """Download a backup from Backblaze B2."""
        file = await self._get_file_for_download(backup_id)
        _LOGGER.debug("Downloading %s", file.file_name)

        downloaded_file = await self._hass.async_add_executor_job(file.download)
        response = downloaded_file.response

        async def stream_response() -> AsyncIterator[bytes]:
            """Stream the response into an AsyncIterator."""
            try:
                iterator = response.iter_content(chunk_size=1024 * 1024)
                while True:
                    chunk = await self._hass.async_add_executor_job(
                        next, iterator, None
                    )
                    if chunk is None:
                        break
                    yield chunk
            finally:
                _LOGGER.debug("Finished streaming download for %s", file.file_name)

        return stream_response()

    @handle_b2_errors
    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Upload a backup to Backblaze B2.

        This involves uploading the main backup archive and a separate metadata JSON file.
        """
        tar_filename, metadata_filename = suggested_filenames(backup)
        prefixed_tar_filename = self._prefix + tar_filename
        prefixed_metadata_filename = self._prefix + metadata_filename

        metadata_content_bytes = json.dumps(
            {
                "metadata_version": METADATA_VERSION,
                "backup_id": backup.backup_id,
                "backup_metadata": backup.as_dict(),
            }
        ).encode("utf-8")

        _LOGGER.debug(
            "Uploading backup: %s, and metadata: %s",
            prefixed_tar_filename,
            prefixed_metadata_filename,
        )

        upload_successful = False
        try:
            await self._upload_backup_file(prefixed_tar_filename, open_stream, {})
            _LOGGER.debug(
                "Main backup file upload finished for %s", prefixed_tar_filename
            )

            _LOGGER.debug("Uploading metadata file: %s", prefixed_metadata_filename)
            await self._upload_metadata_file(
                metadata_content_bytes, prefixed_metadata_filename
            )
            _LOGGER.debug(
                "Metadata file upload finished for %s", prefixed_metadata_filename
            )
            upload_successful = True
        finally:
            if upload_successful:
                _LOGGER.debug("Backup upload complete: %s", prefixed_tar_filename)
                self._invalidate_caches(
                    backup.backup_id, prefixed_tar_filename, prefixed_metadata_filename
                )
            else:
                await self._cleanup_failed_upload(prefixed_tar_filename)

    def _upload_metadata_file_sync(
        self, metadata_content: bytes, filename: str
    ) -> None:
        """Synchronously upload metadata file to B2."""
        self._bucket.upload_bytes(
            metadata_content,
            filename,
            content_type="application/json",
            file_info={"metadata_only": "true"},
        )

    async def _upload_metadata_file(
        self, metadata_content: bytes, filename: str
    ) -> None:
        """Upload metadata file to B2."""
        await self._hass.async_add_executor_job(
            self._upload_metadata_file_sync,
            metadata_content,
            filename,
        )

    def _upload_unbound_stream_sync(
        self,
        reader: AsyncIteratorReader,
        filename: str,
        content_type: str,
        file_info: dict[str, Any],
    ) -> FileVersion:
        """Synchronously upload unbound stream to B2."""
        return self._bucket.upload_unbound_stream(
            reader,
            filename,
            content_type=content_type,
            file_info=file_info,
        )

    def _download_and_parse_metadata_sync(
        self, metadata_file_version: FileVersion
    ) -> dict[str, Any]:
        """Synchronously download and parse metadata file."""
        return _parse_metadata(
            metadata_file_version.download().response.content.decode("utf-8")
        )

    async def _upload_backup_file(
        self,
        filename: str,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        file_info: dict[str, Any],
    ) -> None:
        """Upload backup file to B2 using streaming."""
        _LOGGER.debug("Starting streaming upload for %s", filename)

        stream = await open_stream()
        reader = AsyncIteratorReader(self._hass.loop, stream)

        _LOGGER.debug("Uploading backup file %s with streaming", filename)
        try:
            content_type, _ = mimetypes.guess_type(filename)
            file_version = await self._hass.async_add_executor_job(
                self._upload_unbound_stream_sync,
                reader,
                filename,
                content_type or "application/x-tar",
                file_info,
            )
        finally:
            reader.close()

        _LOGGER.debug("Successfully uploaded %s (ID: %s)", filename, file_version.id_)

    @handle_b2_errors
    async def async_delete_backup(self, backup_id: str, **kwargs: Any) -> None:
        """Delete a backup and its associated metadata file from Backblaze B2."""
        file, metadata_file = await self._find_file_and_metadata_version_by_id(
            backup_id
        )
        if not file:
            raise BackupNotFound(f"Backup {backup_id} not found")

        # Invariant: when file is not None, metadata_file is also not None
        assert metadata_file is not None

        _LOGGER.debug(
            "Deleting backup file: %s and metadata file: %s",
            file.file_name,
            metadata_file.file_name,
        )

        await self._hass.async_add_executor_job(file.delete)
        await self._hass.async_add_executor_job(metadata_file.delete)

        self._invalidate_caches(
            backup_id,
            file.file_name,
            metadata_file.file_name,
            remove_files=True,
        )

    @handle_b2_errors
    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List all backups by finding their associated metadata files in Backblaze B2."""
        async with self._backup_list_cache_lock:
            if self._backup_list_cache and self._is_cache_valid(
                self._backup_list_cache_expiration
            ):
                _LOGGER.debug("Returning backups from cache")
                return list(self._backup_list_cache.values())

            _LOGGER.debug(
                "Cache expired or empty, fetching all files from B2 to build backup list"
            )
            all_files_in_prefix = await self._get_all_files_in_prefix()

            _LOGGER.debug(
                "Files found in prefix '%s': %s",
                self._prefix,
                list(all_files_in_prefix.keys()),
            )

            # Process metadata files sequentially to avoid exhausting executor pool
            backups = {}
            for file_name, file_version in all_files_in_prefix.items():
                if file_name.endswith(METADATA_FILE_SUFFIX):
                    backup = await self._hass.async_add_executor_job(
                        self._process_metadata_file_sync,
                        file_name,
                        file_version,
                        all_files_in_prefix,
                    )
                    if backup:
                        backups[backup.backup_id] = backup
            self._backup_list_cache = backups
            self._backup_list_cache_expiration = time() + CACHE_TTL

            return list(backups.values())

    @handle_b2_errors
    async def async_get_backup(self, backup_id: str, **kwargs: Any) -> AgentBackup:
        """Get a specific backup by its ID from Backblaze B2."""
        if self._backup_list_cache and self._is_cache_valid(
            self._backup_list_cache_expiration
        ):
            if backup := self._backup_list_cache.get(backup_id):
                _LOGGER.debug("Returning backup %s from cache", backup_id)
                return backup

        file, metadata_file_version = await self._find_file_and_metadata_version_by_id(
            backup_id
        )
        if not file or not metadata_file_version:
            raise BackupNotFound(f"Backup {backup_id} not found")

        metadata_content = await self._hass.async_add_executor_job(
            self._download_and_parse_metadata_sync,
            metadata_file_version,
        )

        _LOGGER.debug(
            "Successfully retrieved metadata for backup ID %s from file %s",
            backup_id,
            metadata_file_version.file_name,
        )
        backup = _create_backup_from_metadata(metadata_content, file)

        if self._is_cache_valid(self._backup_list_cache_expiration):
            self._backup_list_cache[backup.backup_id] = backup

        return backup

    async def _find_file_and_metadata_version_by_id(
        self, backup_id: str
    ) -> tuple[FileVersion | None, FileVersion | None]:
        """Find the main backup file and its associated metadata file version by backup ID."""
        all_files_in_prefix = await self._get_all_files_in_prefix()

        # Process metadata files sequentially to avoid exhausting executor pool
        for file_name, file_version in all_files_in_prefix.items():
            if file_name.endswith(METADATA_FILE_SUFFIX):
                (
                    result_backup_file,
                    result_metadata_file_version,
                ) = await self._hass.async_add_executor_job(
                    self._process_metadata_file_for_id_sync,
                    file_name,
                    file_version,
                    backup_id,
                    all_files_in_prefix,
                )
                if result_backup_file and result_metadata_file_version:
                    return result_backup_file, result_metadata_file_version

        _LOGGER.debug("Backup %s not found", backup_id)
        return None, None

    def _process_metadata_file_for_id_sync(
        self,
        file_name: str,
        file_version: FileVersion,
        target_backup_id: str,
        all_files_in_prefix: dict[str, FileVersion],
    ) -> tuple[FileVersion | None, FileVersion | None]:
        """Synchronously process a single metadata file for a specific backup ID.

        Called within a thread pool executor.
        """
        try:
            download_response = file_version.download().response
        except B2Error as err:
            _LOGGER.warning(
                "Failed to download metadata file %s during ID search: %s",
                file_name,
                err,
            )
            return None, None

        try:
            metadata_content = _parse_metadata(
                download_response.content.decode("utf-8")
            )
        except ValueError:
            return None, None

        if metadata_content["backup_id"] != target_backup_id:
            _LOGGER.debug(
                "Metadata file %s does not match target backup ID %s",
                file_name,
                target_backup_id,
            )
            return None, None

        found_backup_file = _find_backup_file_for_metadata(
            file_name, all_files_in_prefix, self._prefix
        )
        if not found_backup_file:
            _LOGGER.warning(
                "Found metadata file %s for backup ID %s, but no corresponding backup file",
                file_name,
                target_backup_id,
            )
            return None, None

        _LOGGER.debug(
            "Found backup file %s and metadata file %s for ID %s",
            found_backup_file.file_name,
            file_name,
            target_backup_id,
        )
        return found_backup_file, file_version

    async def _get_all_files_in_prefix(self) -> dict[str, FileVersion]:
        """Get all file versions in the configured prefix from Backblaze B2.

        Uses a cache to minimize API calls.

        This fetches a flat list of all files, including main backups and metadata files.
        """
        async with self._all_files_cache_lock:
            if self._is_cache_valid(self._all_files_cache_expiration):
                _LOGGER.debug("Returning all files from cache")
                return self._all_files_cache

            _LOGGER.debug("Cache for all files expired or empty, fetching from B2")
            all_files_in_prefix = await self._hass.async_add_executor_job(
                self._fetch_all_files_in_prefix
            )
            self._all_files_cache = all_files_in_prefix
            self._all_files_cache_expiration = time() + CACHE_TTL
            return all_files_in_prefix

    def _fetch_all_files_in_prefix(self) -> dict[str, FileVersion]:
        """Fetch all files in the configured prefix from B2."""
        all_files: dict[str, FileVersion] = {}
        for file, _ in self._bucket.ls(self._prefix):
            all_files[file.file_name] = file
        return all_files

    def _process_metadata_file_sync(
        self,
        file_name: str,
        file_version: FileVersion,
        all_files_in_prefix: dict[str, FileVersion],
    ) -> AgentBackup | None:
        """Synchronously process a single metadata file and return an AgentBackup if valid."""
        try:
            download_response = file_version.download().response
        except B2Error as err:
            _LOGGER.warning("Failed to download metadata file %s: %s", file_name, err)
            return None

        try:
            metadata_content = _parse_metadata(
                download_response.content.decode("utf-8")
            )
        except ValueError:
            return None

        found_backup_file = _find_backup_file_for_metadata(
            file_name, all_files_in_prefix, self._prefix
        )
        if not found_backup_file:
            _LOGGER.warning(
                "Found metadata file %s but no corresponding backup file",
                file_name,
            )
            return None

        _LOGGER.debug(
            "Successfully processed metadata file %s for backup ID %s",
            file_name,
            metadata_content["backup_id"],
        )
        return _create_backup_from_metadata(metadata_content, found_backup_file)

    def _invalidate_caches(
        self,
        backup_id: str,
        tar_filename: str,
        metadata_filename: str | None,
        *,
        remove_files: bool = False,
    ) -> None:
        """Invalidate caches after upload/deletion operations.

        Args:
            backup_id: The backup ID to remove from backup cache
            tar_filename: The tar filename to remove from files cache
            metadata_filename: The metadata filename to remove from files cache
            remove_files: If True, remove specific files from cache; if False, expire entire cache

        """
        if remove_files:
            if self._is_cache_valid(self._all_files_cache_expiration):
                self._all_files_cache.pop(tar_filename, None)
                if metadata_filename:
                    self._all_files_cache.pop(metadata_filename, None)

            if self._is_cache_valid(self._backup_list_cache_expiration):
                self._backup_list_cache.pop(backup_id, None)
        else:
            # For uploads, we can't easily add new FileVersion objects without API calls,
            # so we expire the entire cache for simplicity
            self._all_files_cache_expiration = 0.0
            self._backup_list_cache_expiration = 0.0
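A timeline sketch of the caching policy implemented above (timestamps are illustrative):

    # t=0   async_list_backups()    -> ls() on B2 plus metadata downloads; caches filled, TTL 300 s
    # t=10  async_get_backup(id)    -> served from _backup_list_cache
    # t=20  async_delete_backup(id) -> _invalidate_caches(remove_files=True): evicts only the two filenames and the backup id
    # t=30  async_upload_backup()   -> _invalidate_caches(): both expirations reset to 0.0, so the next call refetches from B2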
homeassistant/components/backblaze_b2/config_flow.py (new file, 288 lines)
@@ -0,0 +1,288 @@
"""Config flow for the Backblaze B2 integration."""

from __future__ import annotations

from collections.abc import Mapping
import logging
from typing import Any

from b2sdk.v2 import B2Api, InMemoryAccountInfo, exception
import voluptuous as vol

from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.selector import (
    TextSelector,
    TextSelectorConfig,
    TextSelectorType,
)

from .const import (
    BACKBLAZE_REALM,
    CONF_APPLICATION_KEY,
    CONF_BUCKET,
    CONF_KEY_ID,
    CONF_PREFIX,
    DOMAIN,
)

_LOGGER = logging.getLogger(__name__)

# Constants
REQUIRED_CAPABILITIES = {"writeFiles", "listFiles", "deleteFiles", "readFiles"}

STEP_USER_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_KEY_ID): cv.string,
        vol.Required(CONF_APPLICATION_KEY): TextSelector(
            config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
        ),
        vol.Required(CONF_BUCKET): cv.string,
        vol.Optional(CONF_PREFIX, default=""): cv.string,
    }
)
||||
|
||||
|
||||
class BackblazeConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Backblaze B2."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
reauth_entry: ConfigEntry[Any] | None
|
||||
|
||||
def _abort_if_duplicate_credentials(self, user_input: dict[str, Any]) -> None:
|
||||
"""Abort if credentials already exist in another entry."""
|
||||
self._async_abort_entries_match(
|
||||
{
|
||||
CONF_KEY_ID: user_input[CONF_KEY_ID],
|
||||
CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
|
||||
}
|
||||
)
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a flow initiated by the user."""
|
||||
errors: dict[str, str] = {}
|
||||
placeholders: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
self._abort_if_duplicate_credentials(user_input)
|
||||
|
||||
errors, placeholders = await self._async_validate_backblaze_connection(
|
||||
user_input
|
||||
)
|
||||
|
||||
if not errors:
|
||||
if user_input[CONF_PREFIX] and not user_input[CONF_PREFIX].endswith(
|
||||
"/"
|
||||
):
|
||||
user_input[CONF_PREFIX] += "/"
|
||||
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_BUCKET], data=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_USER_DATA_SCHEMA, user_input
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders={"brand_name": "Backblaze B2", **placeholders},
|
||||
)
|
||||
|
||||
async def _async_validate_backblaze_connection(
|
||||
self, user_input: dict[str, Any]
|
||||
) -> tuple[dict[str, str], dict[str, str]]:
|
||||
"""Validate Backblaze B2 credentials, bucket, capabilities, and prefix.
|
||||
|
||||
Returns a tuple of (errors_dict, placeholders_dict).
|
||||
"""
|
||||
errors: dict[str, str] = {}
|
||||
placeholders: dict[str, str] = {}
|
||||
|
||||
info = InMemoryAccountInfo()
|
||||
b2_api = B2Api(info)
|
||||
|
||||
def _authorize_and_get_bucket_sync() -> None:
|
||||
"""Synchronously authorize the account and get the bucket by name.
|
||||
|
||||
This function is run in the executor because b2sdk operations are blocking.
|
||||
"""
|
||||
b2_api.authorize_account(
|
||||
BACKBLAZE_REALM, # Use the defined realm constant
|
||||
user_input[CONF_KEY_ID],
|
||||
user_input[CONF_APPLICATION_KEY],
|
||||
)
|
||||
b2_api.get_bucket_by_name(user_input[CONF_BUCKET])
|
||||
|
||||
try:
|
||||
await self.hass.async_add_executor_job(_authorize_and_get_bucket_sync)
|
||||
|
||||
allowed = b2_api.account_info.get_allowed()
|
||||
|
||||
# Check if allowed info is available
|
||||
if allowed is None or not allowed.get("capabilities"):
|
||||
errors["base"] = "invalid_capability"
|
||||
placeholders["missing_capabilities"] = ", ".join(
|
||||
sorted(REQUIRED_CAPABILITIES)
|
||||
)
|
||||
else:
|
||||
# Check if all required capabilities are present
|
||||
current_caps = set(allowed["capabilities"])
|
||||
if not REQUIRED_CAPABILITIES.issubset(current_caps):
|
||||
missing_caps = REQUIRED_CAPABILITIES - current_caps
|
||||
_LOGGER.warning(
|
||||
"Missing required Backblaze B2 capabilities for Key ID '%s': %s",
|
||||
user_input[CONF_KEY_ID],
|
||||
", ".join(sorted(missing_caps)),
|
||||
)
|
||||
errors["base"] = "invalid_capability"
|
||||
placeholders["missing_capabilities"] = ", ".join(
|
||||
sorted(missing_caps)
|
||||
)
|
||||
else:
|
||||
# Only check prefix if capabilities are valid
|
||||
configured_prefix: str = user_input[CONF_PREFIX]
|
||||
allowed_prefix = allowed.get("namePrefix") or ""
|
||||
# Ensure configured prefix starts with Backblaze B2's allowed prefix
|
||||
if allowed_prefix and not configured_prefix.startswith(
|
||||
allowed_prefix
|
||||
):
|
||||
errors[CONF_PREFIX] = "invalid_prefix"
|
||||
placeholders["allowed_prefix"] = allowed_prefix
|
||||
|
||||
except exception.Unauthorized:
|
||||
_LOGGER.debug(
|
||||
"Backblaze B2 authentication failed for Key ID '%s'",
|
||||
user_input[CONF_KEY_ID],
|
||||
)
|
||||
errors["base"] = "invalid_credentials"
|
||||
except exception.RestrictedBucket as err:
|
||||
_LOGGER.debug(
|
||||
"Access to Backblaze B2 bucket '%s' is restricted: %s",
|
||||
user_input[CONF_BUCKET],
|
||||
err,
|
||||
)
|
||||
placeholders["restricted_bucket_name"] = err.bucket_name
|
||||
errors[CONF_BUCKET] = "restricted_bucket"
|
||||
except exception.NonExistentBucket:
|
||||
_LOGGER.debug(
|
||||
"Backblaze B2 bucket '%s' does not exist", user_input[CONF_BUCKET]
|
||||
)
|
||||
errors[CONF_BUCKET] = "invalid_bucket_name"
|
||||
except exception.ConnectionReset:
|
||||
_LOGGER.error("Failed to connect to Backblaze B2. Connection reset")
|
||||
errors["base"] = "cannot_connect"
|
||||
except exception.MissingAccountData:
|
||||
# This generally indicates an issue with how InMemoryAccountInfo is used
|
||||
_LOGGER.error(
|
||||
"Missing account data during Backblaze B2 authorization for Key ID '%s'",
|
||||
user_input[CONF_KEY_ID],
|
||||
)
|
||||
errors["base"] = "invalid_credentials"
|
||||
except Exception:
|
||||
_LOGGER.exception(
|
||||
"An unexpected error occurred during Backblaze B2 configuration for Key ID '%s'",
|
||||
user_input[CONF_KEY_ID],
|
||||
)
|
||||
errors["base"] = "unknown"
|
||||
|
||||
return errors, placeholders
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauthentication flow."""
|
||||
self.reauth_entry = self.hass.config_entries.async_get_entry(
|
||||
self.context["entry_id"]
|
||||
)
|
||||
assert self.reauth_entry is not None
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm reauthentication."""
|
||||
assert self.reauth_entry is not None
|
||||
errors: dict[str, str] = {}
|
||||
placeholders: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
self._abort_if_duplicate_credentials(user_input)
|
||||
|
||||
validation_input = {
|
||||
CONF_KEY_ID: user_input[CONF_KEY_ID],
|
||||
CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
|
||||
CONF_BUCKET: self.reauth_entry.data[CONF_BUCKET],
|
||||
CONF_PREFIX: self.reauth_entry.data[CONF_PREFIX],
|
||||
}
|
||||
|
||||
errors, placeholders = await self._async_validate_backblaze_connection(
|
||||
validation_input
|
||||
)
|
||||
|
||||
if not errors:
|
||||
return self.async_update_reload_and_abort(
|
||||
self.reauth_entry,
|
||||
data_updates={
|
||||
CONF_KEY_ID: user_input[CONF_KEY_ID],
|
||||
CONF_APPLICATION_KEY: user_input[CONF_APPLICATION_KEY],
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_KEY_ID): cv.string,
|
||||
vol.Required(CONF_APPLICATION_KEY): TextSelector(
|
||||
config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
|
||||
),
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders={
|
||||
"brand_name": "Backblaze B2",
|
||||
"bucket": self.reauth_entry.data[CONF_BUCKET],
|
||||
**placeholders,
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration flow."""
|
||||
entry = self.hass.config_entries.async_get_entry(self.context["entry_id"])
|
||||
assert entry is not None
|
||||
|
||||
if user_input is not None:
|
||||
self._abort_if_duplicate_credentials(user_input)
|
||||
|
||||
errors, placeholders = await self._async_validate_backblaze_connection(
|
||||
user_input
|
||||
)
|
||||
|
||||
if not errors:
|
||||
if user_input[CONF_PREFIX] and not user_input[CONF_PREFIX].endswith(
|
||||
"/"
|
||||
):
|
||||
user_input[CONF_PREFIX] += "/"
|
||||
|
||||
return self.async_update_reload_and_abort(
|
||||
entry,
|
||||
data_updates=user_input,
|
||||
)
|
||||
else:
|
||||
errors = {}
|
||||
placeholders = {}
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
STEP_USER_DATA_SCHEMA, user_input or entry.data
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders={"brand_name": "Backblaze B2", **placeholders},
|
||||
)
|
||||
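For reference, the same credential check can be reproduced outside Home Assistant with plain b2sdk v2 calls. This is a minimal sketch mirroring `_authorize_and_get_bucket_sync` above (key_id, application_key, and bucket_name are placeholder values):

    from b2sdk.v2 import B2Api, InMemoryAccountInfo, exception

    def credentials_can_see_bucket(key_id: str, application_key: str, bucket_name: str) -> bool:
        """Return True if the key authorizes and can resolve the bucket."""
        b2_api = B2Api(InMemoryAccountInfo())
        try:
            # Both calls are blocking network operations; Home Assistant wraps
            # them in async_add_executor_job to keep the event loop responsive.
            b2_api.authorize_account("production", key_id, application_key)
            b2_api.get_bucket_by_name(bucket_name)
        except (exception.Unauthorized, exception.NonExistentBucket):
            return False
        return True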
homeassistant/components/backblaze_b2/const.py (new file, 22 lines)
@@ -0,0 +1,22 @@
"""Constants for the Backblaze B2 integration."""

from collections.abc import Callable
from typing import Final

from homeassistant.util.hass_dict import HassKey

DOMAIN: Final = "backblaze_b2"

CONF_KEY_ID = "key_id"
CONF_APPLICATION_KEY = "application_key"
CONF_BUCKET = "bucket"
CONF_PREFIX = "prefix"

DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
    f"{DOMAIN}.backup_agent_listeners"
)

METADATA_FILE_SUFFIX = ".metadata.json"
METADATA_VERSION = "1"

BACKBLAZE_REALM = "production"
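DATA_BACKUP_AGENT_LISTENERS is a typed HassKey, so lookups through hass.data are type-checked rather than stringly typed. A sketch of the usual registration pattern (the exact call site lives in the integration's backup module and is not shown in this diff):

    from homeassistant.core import HomeAssistant

    def register_backup_listener(hass: HomeAssistant, listener) -> None:
        # setdefault returns a correctly typed list[Callable[[], None]].
        hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)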
homeassistant/components/backblaze_b2/diagnostics.py (new file, 56 lines)
@@ -0,0 +1,56 @@
"""Diagnostics support for Backblaze B2."""

from __future__ import annotations

from typing import Any

from homeassistant.components.diagnostics import async_redact_data
from homeassistant.core import HomeAssistant

from . import BackblazeConfigEntry
from .const import CONF_APPLICATION_KEY, CONF_KEY_ID

TO_REDACT_ENTRY_DATA = {CONF_APPLICATION_KEY, CONF_KEY_ID}
TO_REDACT_ACCOUNT_DATA_ALLOWED = {"bucketId", "bucketName", "namePrefix"}


async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: BackblazeConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    bucket = entry.runtime_data

    try:
        bucket_info = {
            "name": bucket.name,
            "id": bucket.id_,
            "type": bucket.type_,
            "cors_rules": bucket.cors_rules,
            "lifecycle_rules": bucket.lifecycle_rules,
            "revision": bucket.revision,
        }

        account_info = bucket.api.account_info
        account_data: dict[str, Any] = {
            "account_id": account_info.get_account_id(),
            "api_url": account_info.get_api_url(),
            "download_url": account_info.get_download_url(),
            "minimum_part_size": account_info.get_minimum_part_size(),
            "allowed": account_info.get_allowed(),
        }

        if isinstance(account_data["allowed"], dict):
            account_data["allowed"] = async_redact_data(
                account_data["allowed"], TO_REDACT_ACCOUNT_DATA_ALLOWED
            )

    except (AttributeError, TypeError, ValueError, KeyError):
        bucket_info = {"name": "unknown", "id": "unknown"}
        account_data = {"error": "Failed to retrieve detailed account information"}

    return {
        "entry_data": async_redact_data(entry.data, TO_REDACT_ENTRY_DATA),
        "entry_options": entry.options,
        "bucket_info": bucket_info,
        "account_info": account_data,
    }
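async_redact_data walks the mapping recursively and masks matching keys, so the "allowed" block keeps its shape while hiding bucket details. An illustration with invented values:

    allowed = {"capabilities": ["listFiles"], "bucketId": "b2-abc123", "namePrefix": "ha/"}
    redacted = async_redact_data(allowed, TO_REDACT_ACCOUNT_DATA_ALLOWED)
    # redacted == {"capabilities": ["listFiles"], "bucketId": "**REDACTED**", "namePrefix": "**REDACTED**"}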
homeassistant/components/backblaze_b2/manifest.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "domain": "backblaze_b2",
  "name": "Backblaze B2",
  "codeowners": ["@hugo-vrijswijk", "@ElCruncharino"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/backblaze_b2",
  "integration_type": "service",
  "iot_class": "cloud_push",
  "loggers": ["b2sdk"],
  "quality_scale": "bronze",
  "requirements": ["b2sdk==2.8.1"]
}
homeassistant/components/backblaze_b2/quality_scale.yaml (new file, 124 lines)
@@ -0,0 +1,124 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.
  appropriate-polling:
    status: exempt
    comment: Integration does not poll.
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: This integration does not have any custom actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: Entities of this integration do not explicitly subscribe to events.
  entity-unique-id:
    status: exempt
    comment: |
      This integration does not have entities.
  has-entity-name:
    status: exempt
    comment: |
      This integration does not have entities.
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: Integration does not register custom actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: This integration does not have an options flow.
  docs-installation-parameters: done
  entity-unavailable:
    status: exempt
    comment: This integration does not have entities.
  integration-owner: done
  log-when-unavailable:
    status: exempt
    comment: This integration does not have entities.
  parallel-updates:
    status: exempt
    comment: This integration does not poll.
  reauthentication-flow: done
  test-coverage: done

  # Gold
  devices:
    status: exempt
    comment: This integration does not have entities.
  diagnostics: done
  discovery-update-info:
    status: exempt
    comment: Backblaze B2 is a cloud service that is not discovered on the network.
  discovery:
    status: exempt
    comment: Backblaze B2 is a cloud service that is not discovered on the network.
  docs-data-update:
    status: exempt
    comment: This integration does not poll.
  docs-examples:
    status: exempt
    comment: The integration extends core functionality and does not require examples.
  docs-known-limitations: done
  docs-supported-devices:
    status: exempt
    comment: This integration does not support physical devices.
  docs-supported-functions:
    status: exempt
    comment: This integration does not have entities.
  docs-troubleshooting: todo
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: This integration does not have devices.
  entity-category:
    status: exempt
    comment: This integration does not have entities.
  entity-device-class:
    status: exempt
    comment: This integration does not have entities.
  entity-disabled-by-default:
    status: exempt
    comment: This integration does not have entities.
  entity-translations:
    status: exempt
    comment: This integration does not have entities.
  exception-translations: done
  icon-translations:
    status: exempt
    comment: This integration does not use icons.
  reconfiguration-flow: done
  repair-issues: done
  stale-devices:
    status: exempt
    comment: This integration does not have devices.

  # Platinum
  async-dependency:
    status: exempt
    comment: |
      The b2sdk library is synchronous by design. All sync operations are properly
      wrapped with async_add_executor_job to prevent blocking the event loop.
  inject-websession:
    status: exempt
    comment: |
      The b2sdk library does not support custom HTTP session injection.
      It manages HTTP connections internally through its own session management.
  strict-typing:
    status: exempt
    comment: |
      The b2sdk dependency does not include a py.typed file and is not PEP 561 compliant.
      This is outside the integration's control as it's a third-party library requirement.
homeassistant/components/backblaze_b2/repairs.py (new file, 93 lines)
@@ -0,0 +1,93 @@
"""Repair issues for the Backblaze B2 integration."""

from __future__ import annotations

import logging

from b2sdk.v2.exception import (
    B2Error,
    NonExistentBucket,
    RestrictedBucket,
    Unauthorized,
)

from homeassistant.components.repairs import ConfirmRepairFlow
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import issue_registry as ir

from .const import CONF_BUCKET, DOMAIN

_LOGGER = logging.getLogger(__name__)

ISSUE_BUCKET_ACCESS_RESTRICTED = "bucket_access_restricted"
ISSUE_BUCKET_NOT_FOUND = "bucket_not_found"


def _create_issue(
    hass: HomeAssistant,
    entry: ConfigEntry,
    issue_type: str,
    bucket_name: str,
) -> None:
    """Create a repair issue with standard parameters."""
    ir.async_create_issue(
        hass,
        DOMAIN,
        f"{issue_type}_{entry.entry_id}",
        is_fixable=False,
        issue_domain=DOMAIN,
        severity=ir.IssueSeverity.ERROR,
        translation_key=issue_type,
        translation_placeholders={
            "brand_name": "Backblaze B2",
            "title": entry.title,
            "bucket_name": bucket_name,
            "entry_id": entry.entry_id,
        },
    )


def create_bucket_access_restricted_issue(
    hass: HomeAssistant, entry: ConfigEntry, bucket_name: str
) -> None:
    """Create a repair issue for restricted bucket access."""
    _create_issue(hass, entry, ISSUE_BUCKET_ACCESS_RESTRICTED, bucket_name)


def create_bucket_not_found_issue(
    hass: HomeAssistant, entry: ConfigEntry, bucket_name: str
) -> None:
    """Create a repair issue for a non-existent bucket."""
    _create_issue(hass, entry, ISSUE_BUCKET_NOT_FOUND, bucket_name)


async def async_check_for_repair_issues(
    hass: HomeAssistant, entry: ConfigEntry
) -> None:
    """Check for common issues that require user action."""
    bucket = entry.runtime_data
    restricted_issue_id = f"{ISSUE_BUCKET_ACCESS_RESTRICTED}_{entry.entry_id}"
    not_found_issue_id = f"{ISSUE_BUCKET_NOT_FOUND}_{entry.entry_id}"

    try:
        await hass.async_add_executor_job(bucket.api.account_info.get_allowed)
        ir.async_delete_issue(hass, DOMAIN, restricted_issue_id)
        ir.async_delete_issue(hass, DOMAIN, not_found_issue_id)
    except Unauthorized:
        entry.async_start_reauth(hass)
    except RestrictedBucket as err:
        _create_issue(hass, entry, ISSUE_BUCKET_ACCESS_RESTRICTED, err.bucket_name)
    except NonExistentBucket:
        _create_issue(hass, entry, ISSUE_BUCKET_NOT_FOUND, entry.data[CONF_BUCKET])
    except B2Error as err:
        _LOGGER.debug("B2 connectivity test failed: %s", err)


async def async_create_fix_flow(
    hass: HomeAssistant,
    issue_id: str,
    data: dict[str, str | int | float | None] | None,
) -> ConfirmRepairFlow:
    """Create a fix flow for Backblaze B2 issues."""
    return ConfirmRepairFlow()
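Where `async_check_for_repair_issues` is invoked from is not part of this diff; a plausible call site is the integration's setup path once the bucket handle is stored:

    # Hypothetical call site, after entry.runtime_data is assigned in async_setup_entry:
    await async_check_for_repair_issues(hass, entry)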
homeassistant/components/backblaze_b2/strings.json (new file, 92 lines)
@@ -0,0 +1,92 @@
{
  "config": {
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
    },
    "error": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_bucket_name": "[%key:component::backblaze_b2::exceptions::invalid_bucket_name::message%]",
      "invalid_capability": "[%key:component::backblaze_b2::exceptions::invalid_capability::message%]",
      "invalid_credentials": "[%key:component::backblaze_b2::exceptions::invalid_credentials::message%]",
      "invalid_prefix": "[%key:component::backblaze_b2::exceptions::invalid_prefix::message%]",
      "restricted_bucket": "[%key:component::backblaze_b2::exceptions::restricted_bucket::message%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "reauth_confirm": {
        "data": {
          "application_key": "Application key",
          "key_id": "Key ID"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "key_id": "Key ID to connect to {brand_name}"
        },
        "description": "Update your {brand_name} credentials for bucket {bucket}.",
        "title": "Reauthenticate {brand_name}"
      },
      "reconfigure": {
        "data": {
          "application_key": "Application key",
          "bucket": "Bucket name",
          "key_id": "Key ID",
          "prefix": "Folder prefix (optional)"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "bucket": "Bucket must already exist and be writable by the provided credentials.",
          "key_id": "Key ID to connect to {brand_name}",
          "prefix": "Directory path to store backup files in. Leave empty to store in the root."
        },
        "title": "Reconfigure {brand_name}"
      },
      "user": {
        "data": {
          "application_key": "Application key",
          "bucket": "Bucket name",
          "key_id": "Key ID",
          "prefix": "Folder prefix (optional)"
        },
        "data_description": {
          "application_key": "Application key to connect to {brand_name}",
          "bucket": "Bucket must already exist and be writable by the provided credentials.",
          "key_id": "Key ID to connect to {brand_name}",
          "prefix": "Directory path to store backup files in. Leave empty to store in the root."
        },
        "title": "Add {brand_name} backup"
      }
    }
  },
  "exceptions": {
    "cannot_connect": {
      "message": "Cannot connect to endpoint"
    },
    "invalid_bucket_name": {
      "message": "Bucket does not exist or is not writable by the provided credentials."
    },
    "invalid_capability": {
      "message": "Application key does not have the required read/write capabilities."
    },
    "invalid_credentials": {
      "message": "Bucket cannot be accessed using the provided key ID and application key."
    },
    "invalid_prefix": {
      "message": "Prefix is not allowed for the provided key. Must start with {allowed_prefix}."
    },
    "restricted_bucket": {
      "message": "Application key is restricted to bucket {restricted_bucket_name}."
    }
  },
  "issues": {
    "bucket_access_restricted": {
      "description": "Access to your {brand_name} bucket {bucket_name} is restricted for the current credentials. This means your application key may only have access to specific buckets, but not this one. To fix this issue:\n\n1. Log in to your {brand_name} account\n2. Check your application key restrictions\n3. Either use a different bucket that your key can access, or create a new application key with access to {bucket_name}\n4. Go to Settings > Devices & Services > {brand_name} and reconfigure the integration settings\n\nOnce you update the integration settings, this issue will be automatically resolved.",
      "title": "{brand_name} bucket access restricted"
    },
    "bucket_not_found": {
      "description": "The {brand_name} bucket {bucket_name} cannot be found or accessed. This could mean:\n\n1. The bucket was deleted\n2. The bucket name was changed\n3. Your credentials no longer have access to this bucket\n\nTo fix this issue:\n\n1. Log in to your {brand_name} account\n2. Verify the bucket still exists and check its name\n3. Ensure your application key has access to this bucket\n4. Go to Settings > Devices & Services > {brand_name} and reconfigure the integration settings\n\nOnce you update the integration settings, this issue will be automatically resolved.",
      "title": "{brand_name} bucket not found"
    }
  }
}
@@ -8,6 +8,6 @@
   "integration_type": "service",
   "iot_class": "calculated",
   "quality_scale": "internal",
-  "requirements": ["cronsim==2.6", "securetar==2025.2.1"],
+  "requirements": ["cronsim==2.7", "securetar==2025.2.1"],
   "single_config_entry": true
 }

@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/bang_olufsen",
   "integration_type": "device",
   "iot_class": "local_push",
-  "requirements": ["mozart-api==4.1.1.116.4"],
+  "requirements": ["mozart-api==5.1.0.247.1"],
   "zeroconf": ["_bangolufsen._tcp.local."]
 }

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/blue_current",
   "iot_class": "cloud_push",
   "loggers": ["bluecurrent_api"],
-  "requirements": ["bluecurrent-api==1.3.1"]
+  "requirements": ["bluecurrent-api==1.3.2"]
 }

@@ -72,7 +72,7 @@ class BlueMaestroConfigFlow(ConfigFlow, domain=DOMAIN):
                 title=self._discovered_devices[address], data={}
             )

-        current_addresses = self._async_current_ids()
+        current_addresses = self._async_current_ids(include_ignore=False)
         for discovery_info in async_discovered_service_info(self.hass, False):
             address = discovery_info.address
             if address in current_addresses or address in self._discovered_devices:

@@ -20,7 +20,7 @@
     "bluetooth-adapters==2.1.0",
     "bluetooth-auto-recovery==1.5.3",
     "bluetooth-data-tools==1.28.4",
-    "dbus-fast==2.44.5",
+    "dbus-fast==2.45.0",
     "habluetooth==5.7.0"
   ]
 }

@@ -99,6 +99,12 @@ def deserialize_entity_description(
     descriptions_class = descriptions_class._dataclass  # noqa: SLF001
     for field in cached_fields(descriptions_class):
         field_name = field.name
+        # Only set fields that are in the data
+        # otherwise we would override default values with None
+        # causing side effects
+        if field_name not in data:
+            continue
+
         # It would be nice if field.type returned the actual
         # type instead of a str so we could avoid writing this
         # out, but it doesn't. If we end up using this in more

@@ -9,7 +9,7 @@ from brother import Brother, SnmpError
 from homeassistant.components.snmp import async_get_snmp_engine
 from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryNotReady
+from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady

 from .const import (
     CONF_COMMUNITY,
@@ -50,6 +50,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: BrotherConfigEntry) -> b
     coordinator = BrotherDataUpdateCoordinator(hass, entry, brother)
     await coordinator.async_config_entry_first_refresh()

+    if brother.serial.lower() != entry.unique_id:
+        raise ConfigEntryError(
+            translation_domain=DOMAIN,
+            translation_key="serial_mismatch",
+            translation_placeholders={
+                "device": entry.title,
+            },
+        )
+
     entry.runtime_data = coordinator

     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

@@ -13,6 +13,7 @@ from homeassistant.const import CONF_HOST, CONF_PORT, CONF_TYPE
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import section
 from homeassistant.exceptions import HomeAssistantError
+from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
 from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
 from homeassistant.util.network import is_host_valid

@@ -21,6 +22,7 @@ from .const import (
     DEFAULT_COMMUNITY,
     DEFAULT_PORT,
     DOMAIN,
+    PRINTER_TYPE_LASER,
     PRINTER_TYPES,
     SECTION_ADVANCED_SETTINGS,
 )
@@ -28,7 +30,12 @@ from .const import (
 DATA_SCHEMA = vol.Schema(
     {
         vol.Required(CONF_HOST): str,
-        vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
+        vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
+            SelectSelectorConfig(
+                options=PRINTER_TYPES,
+                translation_key="printer_type",
+            )
+        ),
         vol.Required(SECTION_ADVANCED_SETTINGS): section(
             vol.Schema(
                 {
@@ -42,7 +49,12 @@ DATA_SCHEMA = vol.Schema(
 )
 ZEROCONF_SCHEMA = vol.Schema(
     {
-        vol.Optional(CONF_TYPE, default="laser"): vol.In(PRINTER_TYPES),
+        vol.Required(CONF_TYPE, default=PRINTER_TYPE_LASER): SelectSelector(
+            SelectSelectorConfig(
+                options=PRINTER_TYPES,
+                translation_key="printer_type",
+            )
+        ),
         vol.Required(SECTION_ADVANCED_SETTINGS): section(
             vol.Schema(
                 {

@@ -7,7 +7,10 @@ from typing import Final

 DOMAIN: Final = "brother"

-PRINTER_TYPES: Final = ["laser", "ink"]
+PRINTER_TYPE_LASER = "laser"
+PRINTER_TYPE_INK = "ink"
+
+PRINTER_TYPES: Final = [PRINTER_TYPE_LASER, PRINTER_TYPE_INK]

 UPDATE_INTERVAL = timedelta(seconds=30)

homeassistant/components/brother/entity.py (new file, 30 lines)
@@ -0,0 +1,30 @@
"""Define the Brother entity."""

from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import BrotherDataUpdateCoordinator


class BrotherPrinterEntity(CoordinatorEntity[BrotherDataUpdateCoordinator]):
    """Define a Brother Printer entity."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: BrotherDataUpdateCoordinator,
    ) -> None:
        """Initialize."""
        super().__init__(coordinator)
        self._attr_device_info = DeviceInfo(
            configuration_url=f"http://{coordinator.brother.host}/",
            identifiers={(DOMAIN, coordinator.brother.serial)},
            connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
            serial_number=coordinator.brother.serial,
            manufacturer="Brother",
            model=coordinator.brother.model,
            name=coordinator.brother.model,
            sw_version=coordinator.brother.firmware,
        )
@@ -19,13 +19,15 @@ from homeassistant.components.sensor import (
from homeassistant.const import PERCENTAGE, EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .coordinator import BrotherConfigEntry, BrotherDataUpdateCoordinator
from .entity import BrotherPrinterEntity

# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0

ATTR_COUNTER = "counter"
ATTR_REMAINING_PAGES = "remaining_pages"
@@ -330,12 +332,9 @@ async def async_setup_entry(
     )


-class BrotherPrinterSensor(
-    CoordinatorEntity[BrotherDataUpdateCoordinator], SensorEntity
-):
-    """Define an Brother Printer sensor."""
+class BrotherPrinterSensor(BrotherPrinterEntity, SensorEntity):
+    """Define a Brother Printer sensor."""

-    _attr_has_entity_name = True
     entity_description: BrotherSensorEntityDescription

     def __init__(
@@ -345,16 +344,7 @@ class BrotherPrinterSensor(
     ) -> None:
         """Initialize."""
         super().__init__(coordinator)
-        self._attr_device_info = DeviceInfo(
-            configuration_url=f"http://{coordinator.brother.host}/",
-            identifiers={(DOMAIN, coordinator.brother.serial)},
-            connections={(CONNECTION_NETWORK_MAC, coordinator.brother.mac)},
-            serial_number=coordinator.brother.serial,
-            manufacturer="Brother",
-            model=coordinator.brother.model,
-            name=coordinator.brother.model,
-            sw_version=coordinator.brother.firmware,
-        )
-
         self._attr_native_value = description.value(coordinator.data)
         self._attr_unique_id = f"{coordinator.brother.serial.lower()}_{description.key}"
         self.entity_description = description

@@ -38,11 +38,11 @@
     "user": {
       "data": {
         "host": "[%key:common::config_flow::data::host%]",
-        "type": "Type of the printer"
+        "type": "Printer type"
       },
       "data_description": {
         "host": "The hostname or IP address of the Brother printer to control.",
-        "type": "Brother printer type: ink or laser."
+        "type": "The type of the Brother printer."
       },
       "sections": {
         "advanced_settings": {
@@ -207,8 +207,19 @@
     "cannot_connect": {
       "message": "An error occurred while connecting to the {device} printer: {error}"
     },
+    "serial_mismatch": {
+      "message": "The serial number for {device} doesn't match the one in the configuration. It's possible that the two Brother printers have swapped IP addresses. Restore the previous IP address configuration or reconfigure the devices with Home Assistant."
+    },
     "update_error": {
       "message": "An error occurred while retrieving data from the {device} printer: {error}"
     }
   },
+  "selector": {
+    "printer_type": {
+      "options": {
+        "ink": "ink",
+        "laser": "laser"
+      }
+    }
+  }
 }

@@ -63,6 +63,7 @@ BINARY_SENSOR_DESCRIPTIONS = {
     ),
     BTHomeBinarySensorDeviceClass.GENERIC: BinarySensorEntityDescription(
         key=BTHomeBinarySensorDeviceClass.GENERIC,
+        translation_key="generic",
     ),
     BTHomeBinarySensorDeviceClass.LIGHT: BinarySensorEntityDescription(
         key=BTHomeBinarySensorDeviceClass.LIGHT,
@@ -159,10 +160,7 @@ def sensor_update_to_bluetooth_data_update(
             device_key_to_bluetooth_entity_key(device_key): sensor_values.native_value
             for device_key, sensor_values in sensor_update.binary_entity_values.items()
         },
-        entity_names={
-            device_key_to_bluetooth_entity_key(device_key): sensor_values.name
-            for device_key, sensor_values in sensor_update.binary_entity_values.items()
-        },
+        entity_names={},
     )

|
||||
@@ -59,6 +59,7 @@ SENSOR_DESCRIPTIONS = {
|
||||
key=f"{BTHomeSensorDeviceClass.ACCELERATION}_{Units.ACCELERATION_METERS_PER_SQUARE_SECOND}",
|
||||
native_unit_of_measurement=Units.ACCELERATION_METERS_PER_SQUARE_SECOND,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="acceleration",
|
||||
),
|
||||
# Battery (percent)
|
||||
(BTHomeSensorDeviceClass.BATTERY, Units.PERCENTAGE): SensorEntityDescription(
|
||||
@@ -72,6 +73,7 @@ SENSOR_DESCRIPTIONS = {
|
||||
(BTHomeExtendedSensorDeviceClass.CHANNEL, None): SensorEntityDescription(
|
||||
key=str(BTHomeExtendedSensorDeviceClass.CHANNEL),
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="channel",
|
||||
),
|
||||
# Conductivity (μS/cm)
|
||||
(
|
||||
@@ -87,6 +89,7 @@ SENSOR_DESCRIPTIONS = {
|
||||
(BTHomeSensorDeviceClass.COUNT, None): SensorEntityDescription(
|
||||
key=str(BTHomeSensorDeviceClass.COUNT),
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="count",
|
||||
),
|
||||
# CO2 (parts per million)
|
||||
(
|
||||
@@ -114,12 +117,14 @@ SENSOR_DESCRIPTIONS = {
|
||||
device_class=SensorDeviceClass.TEMPERATURE,
|
||||
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="dew_point",
|
||||
),
|
||||
# Directions (°)
|
||||
(BTHomeExtendedSensorDeviceClass.DIRECTION, Units.DEGREE): SensorEntityDescription(
|
||||
key=f"{BTHomeExtendedSensorDeviceClass.DIRECTION}_{Units.DEGREE}",
|
||||
native_unit_of_measurement=DEGREE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="direction",
|
||||
),
|
||||
# Distance (mm)
|
||||
(
|
||||
@@ -173,6 +178,7 @@ SENSOR_DESCRIPTIONS = {
|
||||
key=f"{BTHomeSensorDeviceClass.GYROSCOPE}_{Units.GYROSCOPE_DEGREES_PER_SECOND}",
|
||||
native_unit_of_measurement=Units.GYROSCOPE_DEGREES_PER_SECOND,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="gyroscope",
|
||||
),
|
||||
# Humidity in (percent)
|
||||
(BTHomeSensorDeviceClass.HUMIDITY, Units.PERCENTAGE): SensorEntityDescription(
|
||||
@@ -215,6 +221,7 @@ SENSOR_DESCRIPTIONS = {
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
entity_registry_enabled_default=False,
|
||||
translation_key="packet_id",
|
||||
),
|
||||
# PM10 (μg/m3)
|
||||
(
|
||||
@@ -263,12 +270,14 @@ SENSOR_DESCRIPTIONS = {
|
||||
# Raw (-)
|
||||
(BTHomeExtendedSensorDeviceClass.RAW, None): SensorEntityDescription(
|
||||
key=str(BTHomeExtendedSensorDeviceClass.RAW),
|
||||
translation_key="raw",
|
||||
),
|
||||
# Rotation (°)
|
||||
(BTHomeSensorDeviceClass.ROTATION, Units.DEGREE): SensorEntityDescription(
|
||||
key=f"{BTHomeSensorDeviceClass.ROTATION}_{Units.DEGREE}",
|
||||
native_unit_of_measurement=DEGREE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="rotation",
|
||||
),
|
||||
# Rotational speed (rpm)
|
||||
(
|
||||
@@ -278,6 +287,7 @@ SENSOR_DESCRIPTIONS = {
|
||||
key=f"{BTHomeExtendedSensorDeviceClass.ROTATIONAL_SPEED}_{Units.REVOLUTIONS_PER_MINUTE}",
|
||||
native_unit_of_measurement=REVOLUTIONS_PER_MINUTE,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="rotational_speed",
|
||||
),
|
||||
# Signal Strength (RSSI) (dB)
|
||||
(
|
||||
@@ -311,6 +321,7 @@ SENSOR_DESCRIPTIONS = {
|
||||
# Text (-)
|
||||
(BTHomeExtendedSensorDeviceClass.TEXT, None): SensorEntityDescription(
|
||||
key=str(BTHomeExtendedSensorDeviceClass.TEXT),
|
||||
translation_key="text",
|
||||
),
|
||||
# Timestamp (datetime object)
|
||||
(
|
||||
@@ -327,6 +338,7 @@ SENSOR_DESCRIPTIONS = {
|
||||
): SensorEntityDescription(
|
||||
key=str(BTHomeSensorDeviceClass.UV_INDEX),
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
translation_key="uv_index",
|
||||
),
|
||||
# Volatile organic Compounds (VOC) (μg/m3)
|
||||
(
|
||||
@@ -423,10 +435,7 @@ def sensor_update_to_bluetooth_data_update(
|
||||
)
|
||||
for device_key, sensor_values in sensor_update.entity_values.items()
|
||||
},
|
||||
entity_names={
|
||||
device_key_to_bluetooth_entity_key(device_key): sensor_values.name
|
||||
for device_key, sensor_values in sensor_update.entity_values.items()
|
||||
},
|
||||
entity_names={},
|
||||
)
|
||||
|
||||
|
||||
|
||||
@@ -47,6 +47,11 @@
       }
     },
     "entity": {
+      "binary_sensor": {
+        "generic": {
+          "name": "Generic"
+        }
+      },
       "event": {
         "button": {
           "state_attributes": {
@@ -73,6 +78,44 @@
             }
           }
         }
       },
+      "sensor": {
+        "acceleration": {
+          "name": "Acceleration"
+        },
+        "channel": {
+          "name": "Channel"
+        },
+        "count": {
+          "name": "Count"
+        },
+        "dew_point": {
+          "name": "Dew point"
+        },
+        "direction": {
+          "name": "Direction"
+        },
+        "gyroscope": {
+          "name": "Gyroscope"
+        },
+        "packet_id": {
+          "name": "Packet ID"
+        },
+        "raw": {
+          "name": "Raw"
+        },
+        "rotation": {
+          "name": "Rotation"
+        },
+        "rotational_speed": {
+          "name": "Rotational speed"
+        },
+        "text": {
+          "name": "Text"
+        },
+        "uv_index": {
+          "name": "UV Index"
+        }
+      }
     }
   }
 }

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/caldav",
   "iot_class": "cloud_polling",
   "loggers": ["caldav", "vobject"],
-  "requirements": ["caldav==1.6.0", "icalendar==6.3.1"]
+  "requirements": ["caldav==2.1.0", "icalendar==6.3.1", "vobject==0.9.9"]
 }

@@ -57,9 +57,9 @@ async def _async_reproduce_states(
         await call_service(SERVICE_SET_HVAC_MODE, [], {ATTR_HVAC_MODE: state.state})

     if (
-        (ATTR_TEMPERATURE in state.attributes)
-        or (ATTR_TARGET_TEMP_HIGH in state.attributes)
-        or (ATTR_TARGET_TEMP_LOW in state.attributes)
+        (state.attributes.get(ATTR_TEMPERATURE) is not None)
+        or (state.attributes.get(ATTR_TARGET_TEMP_HIGH) is not None)
+        or (state.attributes.get(ATTR_TARGET_TEMP_LOW) is not None)
     ):
         await call_service(
             SERVICE_SET_TEMPERATURE,

@@ -285,5 +285,93 @@
       "name": "[%key:common::action::turn_on%]"
     }
   },
-  "title": "Climate"
+  "title": "Climate",
+  "triggers": {
+    "cooling": {
+      "description": "Triggers when a climate starts cooling.",
+      "name": "When a climate starts cooling"
+    },
+    "current_humidity_changed": {
+      "description": "Triggers when the current humidity of a climate changes.",
+      "fields": {
+        "above": {
+          "description": "Only trigger when the current humidity goes above this value.",
+          "name": "Above"
+        },
+        "below": {
+          "description": "Only trigger when the current humidity goes below this value.",
+          "name": "Below"
+        }
+      },
+      "name": "When current humidity changes"
+    },
+    "current_temperature_changed": {
+      "description": "Triggers when the current temperature of a climate changes.",
+      "fields": {
+        "above": {
+          "description": "Only trigger when the current temperature goes above this value.",
+          "name": "Above"
+        },
+        "below": {
+          "description": "Only trigger when the current temperature goes below this value.",
+          "name": "Below"
+        }
+      },
+      "name": "When current temperature changes"
+    },
+    "drying": {
+      "description": "Triggers when a climate starts drying.",
+      "name": "When a climate starts drying"
+    },
+    "heating": {
+      "description": "Triggers when a climate starts heating.",
+      "name": "When a climate starts heating"
+    },
+    "mode_changed": {
+      "description": "Triggers when the HVAC mode of a climate changes.",
+      "fields": {
+        "hvac_mode": {
+          "description": "The HVAC modes to trigger on. If empty, triggers on all mode changes.",
+          "name": "HVAC modes"
+        }
+      },
+      "name": "When HVAC mode changes"
+    },
+    "target_humidity_changed": {
+      "description": "Triggers when the target humidity of a climate changes.",
+      "fields": {
+        "above": {
+          "description": "Only trigger when the target humidity goes above this value.",
+          "name": "Above"
+        },
+        "below": {
+          "description": "Only trigger when the target humidity goes below this value.",
+          "name": "Below"
+        }
+      },
+      "name": "When target humidity changes"
+    },
+    "target_temperature_changed": {
+      "description": "Triggers when the target temperature of a climate changes.",
+      "fields": {
+        "above": {
+          "description": "Only trigger when the target temperature goes above this value.",
+          "name": "Above"
+        },
+        "below": {
+          "description": "Only trigger when the target temperature goes below this value.",
+          "name": "Below"
+        }
+      },
+      "name": "When target temperature changes"
+    },
+    "turns_off": {
+      "description": "Triggers when a climate turns off.",
+      "name": "When a climate turns off"
+    },
+    "turns_on": {
+      "description": "Triggers when a climate turns on.",
+      "name": "When a climate turns on"
+    }
+  }
 }

homeassistant/components/climate/trigger.py (new file, 817 lines)
@@ -0,0 +1,817 @@
"""Provides triggers for climate."""

from typing import TYPE_CHECKING, cast, override

import voluptuous as vol

from homeassistant.const import (
    ATTR_ENTITY_ID,
    ATTR_TEMPERATURE,
    CONF_ABOVE,
    CONF_BELOW,
    CONF_OPTIONS,
    CONF_TARGET,
    STATE_UNAVAILABLE,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.target import (
    TargetStateChangedData,
    async_track_target_selector_state_change_event,
)
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
from homeassistant.helpers.typing import ConfigType

from .const import (
    ATTR_CURRENT_HUMIDITY,
    ATTR_CURRENT_TEMPERATURE,
    ATTR_HUMIDITY,
    ATTR_HVAC_ACTION,
    ATTR_HVAC_MODE,
    DOMAIN,
    HVAC_MODES,
    HVACMode,
)

CLIMATE_TRIGGER_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_OPTIONS, default={}): {},
        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
    }
)

MODE_CHANGED_TRIGGER_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_OPTIONS, default={}): {
            vol.Optional(ATTR_HVAC_MODE, default=[]): vol.All(
                cv.ensure_list, [vol.In(HVAC_MODES)]
            ),
        },
        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
    }
)

THRESHOLD_TRIGGER_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_OPTIONS, default={}): {
            vol.Optional(CONF_ABOVE): vol.Coerce(float),
            vol.Optional(CONF_BELOW): vol.Coerce(float),
        },
        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
    }
)
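# Example of a config the schemas above accept (illustrative; any payload
# satisfying cv.TARGET_FIELDS works as the target, an entity_id being the
# simplest case):
#
#     MODE_CHANGED_TRIGGER_SCHEMA(
#         {
#             "target": {"entity_id": "climate.living_room"},
#             "options": {"hvac_mode": ["heat", "cool"]},
#         }
#     )
#
# Omitting "options" is valid too: it defaults to {} for the plain climate
# triggers, and for the mode-changed schema the nested default hvac_mode=[]
# matches every mode change.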


class ClimateTurnsOnTrigger(Trigger):
    """Trigger for when a climate turns on."""

    @override
    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, CLIMATE_TRIGGER_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the climate turns on trigger."""
        super().__init__(hass, config)
        if TYPE_CHECKING:
            assert config.options is not None
            assert config.target is not None
        self._options = config.options
        self._target = config.target

    @override
    async def async_attach_runner(
        self, run_action: TriggerActionRunner
    ) -> CALLBACK_TYPE:
        """Attach the trigger to an action runner."""

        @callback
        def state_change_listener(
            target_state_change_data: TargetStateChangedData,
        ) -> None:
            """Listen for state changes and call action."""
            event = target_state_change_data.state_change_event
            entity_id = event.data["entity_id"]
            from_state = event.data["old_state"]
            to_state = event.data["new_state"]

            # Ignore unavailable states
            if to_state is None or to_state.state == STATE_UNAVAILABLE:
                return

            # Check if climate turned on (from off to any other mode)
            if (
                from_state is not None
                and from_state.state == HVACMode.OFF
                and to_state.state != HVACMode.OFF
            ):
                run_action(
                    {
                        ATTR_ENTITY_ID: entity_id,
                        "from_state": from_state,
                        "to_state": to_state,
                    },
                    f"climate {entity_id} turned on",
                    event.context,
                )

        def entity_filter(entities: set[str]) -> set[str]:
            """Filter entities of this domain."""
            return {
                entity_id
                for entity_id in entities
                if split_entity_id(entity_id)[0] == DOMAIN
            }

        return async_track_target_selector_state_change_event(
            self._hass, self._target, state_change_listener, entity_filter
        )


class ClimateTurnsOffTrigger(Trigger):
    """Trigger for when a climate turns off."""

    @override
    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, CLIMATE_TRIGGER_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the climate turns off trigger."""
        super().__init__(hass, config)
        if TYPE_CHECKING:
            assert config.options is not None
            assert config.target is not None
        self._options = config.options
        self._target = config.target

    @override
    async def async_attach_runner(
        self, run_action: TriggerActionRunner
    ) -> CALLBACK_TYPE:
        """Attach the trigger to an action runner."""

        @callback
        def state_change_listener(
            target_state_change_data: TargetStateChangedData,
        ) -> None:
            """Listen for state changes and call action."""
            event = target_state_change_data.state_change_event
            entity_id = event.data["entity_id"]
            from_state = event.data["old_state"]
            to_state = event.data["new_state"]

            # Ignore unavailable states
            if to_state is None or to_state.state == STATE_UNAVAILABLE:
                return

            # Check if climate turned off (from any mode to off)
            if (
                from_state is not None
                and from_state.state != HVACMode.OFF
                and to_state.state == HVACMode.OFF
            ):
                run_action(
                    {
                        ATTR_ENTITY_ID: entity_id,
                        "from_state": from_state,
                        "to_state": to_state,
                    },
                    f"climate {entity_id} turned off",
                    event.context,
                )

        def entity_filter(entities: set[str]) -> set[str]:
            """Filter entities of this domain."""
            return {
                entity_id
                for entity_id in entities
                if split_entity_id(entity_id)[0] == DOMAIN
            }

        return async_track_target_selector_state_change_event(
            self._hass, self._target, state_change_listener, entity_filter
        )


class ClimateModeChangedTrigger(Trigger):
    """Trigger for when a climate mode changes."""

    @override
    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, MODE_CHANGED_TRIGGER_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the climate mode changed trigger."""
        super().__init__(hass, config)
        if TYPE_CHECKING:
            assert config.options is not None
            assert config.target is not None
        self._options = config.options
        self._target = config.target

    @override
    async def async_attach_runner(
        self, run_action: TriggerActionRunner
    ) -> CALLBACK_TYPE:
        """Attach the trigger to an action runner."""
        hvac_modes_filter = self._options[ATTR_HVAC_MODE]

        @callback
        def state_change_listener(
            target_state_change_data: TargetStateChangedData,
        ) -> None:
            """Listen for state changes and call action."""
            event = target_state_change_data.state_change_event
            entity_id = event.data["entity_id"]
            from_state = event.data["old_state"]
            to_state = event.data["new_state"]

            # Ignore unavailable states
            if to_state is None or to_state.state == STATE_UNAVAILABLE:
                return

            # Check if hvac_mode changed
            if from_state is not None and from_state.state != to_state.state:
                # If hvac_modes filter is specified, check if the new mode matches
                if hvac_modes_filter and to_state.state not in hvac_modes_filter:
                    return

                run_action(
                    {
                        ATTR_ENTITY_ID: entity_id,
                        "from_state": from_state,
                        "to_state": to_state,
                    },
                    f"climate {entity_id} mode changed to {to_state.state}",
                    event.context,
                )

        def entity_filter(entities: set[str]) -> set[str]:
            """Filter entities of this domain."""
            return {
                entity_id
                for entity_id in entities
                if split_entity_id(entity_id)[0] == DOMAIN
            }

        return async_track_target_selector_state_change_event(
            self._hass, self._target, state_change_listener, entity_filter
        )


class ClimateCoolingTrigger(Trigger):
    """Trigger for when a climate starts cooling."""

    @override
    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, CLIMATE_TRIGGER_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the climate cooling trigger."""
        super().__init__(hass, config)
        if TYPE_CHECKING:
            assert config.options is not None
            assert config.target is not None
        self._options = config.options
        self._target = config.target

    @override
    async def async_attach_runner(
        self, run_action: TriggerActionRunner
    ) -> CALLBACK_TYPE:
        """Attach the trigger to an action runner."""

        @callback
        def state_change_listener(
            target_state_change_data: TargetStateChangedData,
        ) -> None:
            """Listen for state changes and call action."""
            event = target_state_change_data.state_change_event
            entity_id = event.data["entity_id"]
            from_state = event.data["old_state"]
            to_state = event.data["new_state"]

            # Ignore unavailable states
            if to_state is None or to_state.state == STATE_UNAVAILABLE:
                return

            # Check if climate started cooling
            from_action = (
                from_state.attributes.get(ATTR_HVAC_ACTION) if from_state else None
            )
            to_action = to_state.attributes.get(ATTR_HVAC_ACTION)

            if from_action != "cooling" and to_action == "cooling":
                run_action(
                    {
                        ATTR_ENTITY_ID: entity_id,
                        "from_state": from_state,
                        "to_state": to_state,
                    },
                    f"climate {entity_id} started cooling",
                    event.context,
                )

        def entity_filter(entities: set[str]) -> set[str]:
            """Filter entities of this domain."""
            return {
                entity_id
                for entity_id in entities
                if split_entity_id(entity_id)[0] == DOMAIN
            }

        return async_track_target_selector_state_change_event(
            self._hass, self._target, state_change_listener, entity_filter
        )


class ClimateHeatingTrigger(Trigger):
    """Trigger for when a climate starts heating."""

    @override
    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, CLIMATE_TRIGGER_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the climate heating trigger."""
        super().__init__(hass, config)
        if TYPE_CHECKING:
            assert config.options is not None
            assert config.target is not None
        self._options = config.options
        self._target = config.target

    @override
    async def async_attach_runner(
        self, run_action: TriggerActionRunner
    ) -> CALLBACK_TYPE:
        """Attach the trigger to an action runner."""

        @callback
        def state_change_listener(
            target_state_change_data: TargetStateChangedData,
        ) -> None:
            """Listen for state changes and call action."""
            event = target_state_change_data.state_change_event
            entity_id = event.data["entity_id"]
            from_state = event.data["old_state"]
            to_state = event.data["new_state"]

            # Ignore unavailable states
            if to_state is None or to_state.state == STATE_UNAVAILABLE:
                return

            # Check if climate started heating
            from_action = (
                from_state.attributes.get(ATTR_HVAC_ACTION) if from_state else None
            )
            to_action = to_state.attributes.get(ATTR_HVAC_ACTION)

            if from_action != "heating" and to_action == "heating":
                run_action(
                    {
                        ATTR_ENTITY_ID: entity_id,
                        "from_state": from_state,
                        "to_state": to_state,
                    },
                    f"climate {entity_id} started heating",
                    event.context,
                )

        def entity_filter(entities: set[str]) -> set[str]:
            """Filter entities of this domain."""
            return {
                entity_id
                for entity_id in entities
                if split_entity_id(entity_id)[0] == DOMAIN
            }

        return async_track_target_selector_state_change_event(
            self._hass, self._target, state_change_listener, entity_filter
        )


class ClimateDryingTrigger(Trigger):
    """Trigger for when a climate starts drying."""

    @override
    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, CLIMATE_TRIGGER_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the climate drying trigger."""
        super().__init__(hass, config)
        if TYPE_CHECKING:
            assert config.options is not None
            assert config.target is not None
        self._options = config.options
        self._target = config.target
|
||||
|
||||
@override
|
||||
async def async_attach_runner(
|
||||
self, run_action: TriggerActionRunner
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach the trigger to an action runner."""
|
||||
|
||||
@callback
|
||||
def state_change_listener(
|
||||
target_state_change_data: TargetStateChangedData,
|
||||
) -> None:
|
||||
"""Listen for state changes and call action."""
|
||||
event = target_state_change_data.state_change_event
|
||||
entity_id = event.data["entity_id"]
|
||||
from_state = event.data["old_state"]
|
||||
to_state = event.data["new_state"]
|
||||
|
||||
# Ignore unavailable states
|
||||
if to_state is None or to_state.state == STATE_UNAVAILABLE:
|
||||
return
|
||||
|
||||
# Check if climate started drying
|
||||
from_action = from_state.attributes.get(ATTR_HVAC_ACTION) if from_state else None
|
||||
to_action = to_state.attributes.get(ATTR_HVAC_ACTION)
|
||||
|
||||
if from_action != "drying" and to_action == "drying":
|
||||
run_action(
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
"from_state": from_state,
|
||||
"to_state": to_state,
|
||||
},
|
||||
f"climate {entity_id} started drying",
|
||||
event.context,
|
||||
)
|
||||
|
||||
def entity_filter(entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if split_entity_id(entity_id)[0] == DOMAIN
|
||||
}
|
||||
|
||||
return async_track_target_selector_state_change_event(
|
||||
self._hass, self._target, state_change_listener, entity_filter
|
||||
)
|
||||
|
||||
|
||||
class ClimateTargetTemperatureChangedTrigger(Trigger):
|
||||
"""Trigger for when a climate target temperature changes."""
|
||||
|
||||
@override
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, THRESHOLD_TRIGGER_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize the climate target temperature changed trigger."""
|
||||
super().__init__(hass, config)
|
||||
if TYPE_CHECKING:
|
||||
assert config.options is not None
|
||||
assert config.target is not None
|
||||
self._options = config.options
|
||||
self._target = config.target
|
||||
|
||||
@override
|
||||
async def async_attach_runner(
|
||||
self, run_action: TriggerActionRunner
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach the trigger to an action runner."""
|
||||
above = self._options.get(CONF_ABOVE)
|
||||
below = self._options.get(CONF_BELOW)
|
||||
|
||||
@callback
|
||||
def state_change_listener(
|
||||
target_state_change_data: TargetStateChangedData,
|
||||
) -> None:
|
||||
"""Listen for state changes and call action."""
|
||||
event = target_state_change_data.state_change_event
|
||||
entity_id = event.data["entity_id"]
|
||||
from_state = event.data["old_state"]
|
||||
to_state = event.data["new_state"]
|
||||
|
||||
# Ignore unavailable states
|
||||
if to_state is None or to_state.state == STATE_UNAVAILABLE:
|
||||
return
|
||||
|
||||
# Check if target temperature changed
|
||||
from_temp = (
|
||||
from_state.attributes.get(ATTR_TEMPERATURE) if from_state else None
|
||||
)
|
||||
to_temp = to_state.attributes.get(ATTR_TEMPERATURE)
|
||||
|
||||
if to_temp is None or from_temp == to_temp:
|
||||
return
|
||||
|
||||
# Apply threshold filters if specified
|
||||
if above is not None and to_temp <= above:
|
||||
return
|
||||
if below is not None and to_temp >= below:
|
||||
return
|
||||
|
||||
run_action(
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
"from_state": from_state,
|
||||
"to_state": to_state,
|
||||
},
|
||||
f"climate {entity_id} target temperature changed to {to_temp}",
|
||||
event.context,
|
||||
)
|
||||
|
||||
def entity_filter(entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if split_entity_id(entity_id)[0] == DOMAIN
|
||||
}
|
||||
|
||||
return async_track_target_selector_state_change_event(
|
||||
self._hass, self._target, state_change_listener, entity_filter
|
||||
)
|
||||
|
||||
|
||||
class ClimateCurrentTemperatureChangedTrigger(Trigger):
|
||||
"""Trigger for when a climate current temperature changes."""
|
||||
|
||||
@override
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, THRESHOLD_TRIGGER_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize the climate current temperature changed trigger."""
|
||||
super().__init__(hass, config)
|
||||
if TYPE_CHECKING:
|
||||
assert config.options is not None
|
||||
assert config.target is not None
|
||||
self._options = config.options
|
||||
self._target = config.target
|
||||
|
||||
@override
|
||||
async def async_attach_runner(
|
||||
self, run_action: TriggerActionRunner
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach the trigger to an action runner."""
|
||||
above = self._options.get(CONF_ABOVE)
|
||||
below = self._options.get(CONF_BELOW)
|
||||
|
||||
@callback
|
||||
def state_change_listener(
|
||||
target_state_change_data: TargetStateChangedData,
|
||||
) -> None:
|
||||
"""Listen for state changes and call action."""
|
||||
event = target_state_change_data.state_change_event
|
||||
entity_id = event.data["entity_id"]
|
||||
from_state = event.data["old_state"]
|
||||
to_state = event.data["new_state"]
|
||||
|
||||
# Ignore unavailable states
|
||||
if to_state is None or to_state.state == STATE_UNAVAILABLE:
|
||||
return
|
||||
|
||||
# Check if current temperature changed
|
||||
from_temp = (
|
||||
from_state.attributes.get(ATTR_CURRENT_TEMPERATURE)
|
||||
if from_state
|
||||
else None
|
||||
)
|
||||
to_temp = to_state.attributes.get(ATTR_CURRENT_TEMPERATURE)
|
||||
|
||||
if to_temp is None or from_temp == to_temp:
|
||||
return
|
||||
|
||||
# Apply threshold filters if specified
|
||||
if above is not None and to_temp <= above:
|
||||
return
|
||||
if below is not None and to_temp >= below:
|
||||
return
|
||||
|
||||
run_action(
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
"from_state": from_state,
|
||||
"to_state": to_state,
|
||||
},
|
||||
f"climate {entity_id} current temperature changed to {to_temp}",
|
||||
event.context,
|
||||
)
|
||||
|
||||
def entity_filter(entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if split_entity_id(entity_id)[0] == DOMAIN
|
||||
}
|
||||
|
||||
return async_track_target_selector_state_change_event(
|
||||
self._hass, self._target, state_change_listener, entity_filter
|
||||
)
|
||||
|
||||
|
||||
class ClimateTargetHumidityChangedTrigger(Trigger):
|
||||
"""Trigger for when a climate target humidity changes."""
|
||||
|
||||
@override
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, THRESHOLD_TRIGGER_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize the climate target humidity changed trigger."""
|
||||
super().__init__(hass, config)
|
||||
if TYPE_CHECKING:
|
||||
assert config.options is not None
|
||||
assert config.target is not None
|
||||
self._options = config.options
|
||||
self._target = config.target
|
||||
|
||||
@override
|
||||
async def async_attach_runner(
|
||||
self, run_action: TriggerActionRunner
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach the trigger to an action runner."""
|
||||
above = self._options.get(CONF_ABOVE)
|
||||
below = self._options.get(CONF_BELOW)
|
||||
|
||||
@callback
|
||||
def state_change_listener(
|
||||
target_state_change_data: TargetStateChangedData,
|
||||
) -> None:
|
||||
"""Listen for state changes and call action."""
|
||||
event = target_state_change_data.state_change_event
|
||||
entity_id = event.data["entity_id"]
|
||||
from_state = event.data["old_state"]
|
||||
to_state = event.data["new_state"]
|
||||
|
||||
# Ignore unavailable states
|
||||
if to_state is None or to_state.state == STATE_UNAVAILABLE:
|
||||
return
|
||||
|
||||
# Check if target humidity changed
|
||||
from_humidity = (
|
||||
from_state.attributes.get(ATTR_HUMIDITY) if from_state else None
|
||||
)
|
||||
to_humidity = to_state.attributes.get(ATTR_HUMIDITY)
|
||||
|
||||
if to_humidity is None or from_humidity == to_humidity:
|
||||
return
|
||||
|
||||
# Apply threshold filters if specified
|
||||
if above is not None and to_humidity <= above:
|
||||
return
|
||||
if below is not None and to_humidity >= below:
|
||||
return
|
||||
|
||||
run_action(
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
"from_state": from_state,
|
||||
"to_state": to_state,
|
||||
},
|
||||
f"climate {entity_id} target humidity changed to {to_humidity}",
|
||||
event.context,
|
||||
)
|
||||
|
||||
def entity_filter(entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if split_entity_id(entity_id)[0] == DOMAIN
|
||||
}
|
||||
|
||||
return async_track_target_selector_state_change_event(
|
||||
self._hass, self._target, state_change_listener, entity_filter
|
||||
)
|
||||
|
||||
|
||||
class ClimateCurrentHumidityChangedTrigger(Trigger):
|
||||
"""Trigger for when a climate current humidity changes."""
|
||||
|
||||
@override
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, THRESHOLD_TRIGGER_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize the climate current humidity changed trigger."""
|
||||
super().__init__(hass, config)
|
||||
if TYPE_CHECKING:
|
||||
assert config.options is not None
|
||||
assert config.target is not None
|
||||
self._options = config.options
|
||||
self._target = config.target
|
||||
|
||||
@override
|
||||
async def async_attach_runner(
|
||||
self, run_action: TriggerActionRunner
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach the trigger to an action runner."""
|
||||
above = self._options.get(CONF_ABOVE)
|
||||
below = self._options.get(CONF_BELOW)
|
||||
|
||||
@callback
|
||||
def state_change_listener(
|
||||
target_state_change_data: TargetStateChangedData,
|
||||
) -> None:
|
||||
"""Listen for state changes and call action."""
|
||||
event = target_state_change_data.state_change_event
|
||||
entity_id = event.data["entity_id"]
|
||||
from_state = event.data["old_state"]
|
||||
to_state = event.data["new_state"]
|
||||
|
||||
# Ignore unavailable states
|
||||
if to_state is None or to_state.state == STATE_UNAVAILABLE:
|
||||
return
|
||||
|
||||
# Check if current humidity changed
|
||||
from_humidity = (
|
||||
from_state.attributes.get(ATTR_CURRENT_HUMIDITY)
|
||||
if from_state
|
||||
else None
|
||||
)
|
||||
to_humidity = to_state.attributes.get(ATTR_CURRENT_HUMIDITY)
|
||||
|
||||
if to_humidity is None or from_humidity == to_humidity:
|
||||
return
|
||||
|
||||
# Apply threshold filters if specified
|
||||
if above is not None and to_humidity <= above:
|
||||
return
|
||||
if below is not None and to_humidity >= below:
|
||||
return
|
||||
|
||||
run_action(
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
"from_state": from_state,
|
||||
"to_state": to_state,
|
||||
},
|
||||
f"climate {entity_id} current humidity changed to {to_humidity}",
|
||||
event.context,
|
||||
)
|
||||
|
||||
def entity_filter(entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if split_entity_id(entity_id)[0] == DOMAIN
|
||||
}
|
||||
|
||||
return async_track_target_selector_state_change_event(
|
||||
self._hass, self._target, state_change_listener, entity_filter
|
||||
)
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"turns_on": ClimateTurnsOnTrigger,
|
||||
"turns_off": ClimateTurnsOffTrigger,
|
||||
"mode_changed": ClimateModeChangedTrigger,
|
||||
"cooling": ClimateCoolingTrigger,
|
||||
"heating": ClimateHeatingTrigger,
|
||||
"drying": ClimateDryingTrigger,
|
||||
"target_temperature_changed": ClimateTargetTemperatureChangedTrigger,
|
||||
"current_temperature_changed": ClimateCurrentTemperatureChangedTrigger,
|
||||
"target_humidity_changed": ClimateTargetHumidityChangedTrigger,
|
||||
"current_humidity_changed": ClimateCurrentHumidityChangedTrigger,
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for climate."""
|
||||
return TRIGGERS
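
The four `*_changed` triggers above share one gating rule: the watched attribute must actually change, and the new value must clear the optional `above`/`below` bounds, both of which are exclusive. A minimal standalone sketch of that predicate (editor's illustration, not part of the PR; `_should_fire` is a hypothetical name):

# Editor's sketch (not part of the PR): the shared above/below gate.
def _should_fire(
    from_value: float | None,
    to_value: float | None,
    above: float | None = None,
    below: float | None = None,
) -> bool:
    """Return True when a changed attribute value passes the thresholds."""
    if to_value is None or from_value == to_value:
        return False  # attribute missing or unchanged
    if above is not None and to_value <= above:
        return False  # must end up strictly above the bound
    if below is not None and to_value >= below:
        return False  # must end up strictly below the bound
    return True


assert _should_fire(20.0, 21.5, above=21.0)  # crossed above 21
assert not _should_fire(21.5, 21.5, above=21.0)  # unchanged values never fire
assert not _should_fire(20.0, 21.0, above=21.0)  # bounds are exclusive
assert not _should_fire(None, None)  # attribute not reported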

homeassistant/components/climate/triggers.yaml (new file, 128 lines)
@@ -0,0 +1,128 @@
turns_on:
  target:
    entity:
      domain: climate

turns_off:
  target:
    entity:
      domain: climate

mode_changed:
  target:
    entity:
      domain: climate
  fields:
    hvac_mode:
      required: false
      default: []
      selector:
        select:
          multiple: true
          mode: dropdown
          options:
            - label: "Off"
              value: "off"
            - label: "Heat"
              value: "heat"
            - label: "Cool"
              value: "cool"
            - label: "Heat/Cool"
              value: "heat_cool"
            - label: "Auto"
              value: "auto"
            - label: "Dry"
              value: "dry"
            - label: "Fan only"
              value: "fan_only"

cooling:
  target:
    entity:
      domain: climate

heating:
  target:
    entity:
      domain: climate

drying:
  target:
    entity:
      domain: climate

target_temperature_changed:
  target:
    entity:
      domain: climate
  fields:
    above:
      required: false
      selector:
        number:
          mode: box
          step: 0.1
    below:
      required: false
      selector:
        number:
          mode: box
          step: 0.1

current_temperature_changed:
  target:
    entity:
      domain: climate
  fields:
    above:
      required: false
      selector:
        number:
          mode: box
          step: 0.1
    below:
      required: false
      selector:
        number:
          mode: box
          step: 0.1

target_humidity_changed:
  target:
    entity:
      domain: climate
  fields:
    above:
      required: false
      selector:
        number:
          mode: box
          min: 0
          max: 100
    below:
      required: false
      selector:
        number:
          mode: box
          min: 0
          max: 100

current_humidity_changed:
  target:
    entity:
      domain: climate
  fields:
    above:
      required: false
      selector:
        number:
          mode: box
          min: 0
          max: 100
    below:
      required: false
      selector:
        number:
          mode: box
          min: 0
          max: 100

@@ -71,8 +71,11 @@ async def _get_services(hass: HomeAssistant) -> list[dict[str, Any]]:
         services = await account_link.async_fetch_available_services(
             hass.data[DATA_CLOUD]
         )
-    except (aiohttp.ClientError, TimeoutError):
-        return []
+    except (aiohttp.ClientError, TimeoutError) as err:
+        raise config_entry_oauth2_flow.ImplementationUnavailableError(
+            "Cannot provide OAuth2 implementation for cloud services. "
+            "Failed to fetch from account link server."
+        ) from err
 
     hass.data[DATA_SERVICES] = services
@@ -14,8 +14,9 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
+from .const import ObjectClassType
 from .coordinator import ComelitConfigEntry, ComelitVedoSystem
-from .utils import DeviceType, new_device_listener
+from .utils import new_device_listener
 
 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -30,7 +31,7 @@ async def async_setup_entry(
 
     coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
+    def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
         """Add entities for new monitors."""
         entities = [
             ComelitVedoBinarySensorEntity(coordinator, device, config_entry.entry_id)
@@ -37,13 +37,6 @@ USER_SCHEMA = vol.Schema(
     }
 )
 STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PIN): cv.string})
-STEP_RECONFIGURE = vol.Schema(
-    {
-        vol.Required(CONF_HOST): cv.string,
-        vol.Required(CONF_PORT): cv.port,
-        vol.Optional(CONF_PIN, default=DEFAULT_PIN): cv.string,
-    }
-)
 
 
 async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str, str]:
@@ -175,36 +168,55 @@ class ComelitConfigFlow(ConfigFlow, domain=DOMAIN):
     ) -> ConfigFlowResult:
         """Handle reconfiguration of the device."""
         reconfigure_entry = self._get_reconfigure_entry()
-        if not user_input:
-            return self.async_show_form(
-                step_id="reconfigure", data_schema=STEP_RECONFIGURE
-            )
-
-        updated_host = user_input[CONF_HOST]
-
-        self._async_abort_entries_match({CONF_HOST: updated_host})
-
         errors: dict[str, str] = {}
-
-        try:
-            await validate_input(self.hass, user_input)
-        except CannotConnect:
-            errors["base"] = "cannot_connect"
-        except InvalidAuth:
-            errors["base"] = "invalid_auth"
-        except InvalidPin:
-            errors["base"] = "invalid_pin"
-        except Exception:  # noqa: BLE001
-            _LOGGER.exception("Unexpected exception")
-            errors["base"] = "unknown"
-        else:
-            return self.async_update_reload_and_abort(
-                reconfigure_entry, data_updates={CONF_HOST: updated_host}
-            )
+        if user_input is not None:
+            updated_host = user_input[CONF_HOST]
+
+            self._async_abort_entries_match({CONF_HOST: updated_host})
+
+            try:
+                data_to_validate = {
+                    CONF_HOST: updated_host,
+                    CONF_PORT: user_input[CONF_PORT],
+                    CONF_PIN: user_input[CONF_PIN],
+                    CONF_TYPE: reconfigure_entry.data.get(CONF_TYPE, BRIDGE),
+                }
+                await validate_input(self.hass, data_to_validate)
+            except CannotConnect:
+                errors["base"] = "cannot_connect"
+            except InvalidAuth:
+                errors["base"] = "invalid_auth"
+            except InvalidPin:
+                errors["base"] = "invalid_pin"
+            except Exception:  # noqa: BLE001
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] = "unknown"
+            else:
+                data_updates = {
+                    CONF_HOST: updated_host,
+                    CONF_PORT: user_input[CONF_PORT],
+                    CONF_PIN: user_input[CONF_PIN],
+                }
+                return self.async_update_reload_and_abort(
+                    reconfigure_entry, data_updates=data_updates
+                )
+
+        schema = vol.Schema(
+            {
+                vol.Required(
+                    CONF_HOST, default=reconfigure_entry.data[CONF_HOST]
+                ): cv.string,
+                vol.Required(
+                    CONF_PORT, default=reconfigure_entry.data[CONF_PORT]
+                ): cv.port,
+                vol.Optional(CONF_PIN): cv.string,
+            }
+        )
+
         return self.async_show_form(
             step_id="reconfigure",
-            data_schema=STEP_RECONFIGURE,
+            data_schema=schema,
             errors=errors,
         )
@@ -2,10 +2,20 @@
 
 import logging
 
+from aiocomelit.api import (
+    ComelitSerialBridgeObject,
+    ComelitVedoAreaObject,
+    ComelitVedoZoneObject,
+)
 from aiocomelit.const import BRIDGE, VEDO
 
 _LOGGER = logging.getLogger(__package__)
 
+ObjectClassType = (
+    ComelitSerialBridgeObject | ComelitVedoAreaObject | ComelitVedoZoneObject
+)
+
+
 DOMAIN = "comelit"
 DEFAULT_PORT = 80
 DEVICE_TYPE_LIST = [BRIDGE, VEDO]
@@ -10,8 +10,6 @@ from aiocomelit.api import (
     ComeliteSerialBridgeApi,
     ComelitSerialBridgeObject,
     ComelitVedoApi,
-    ComelitVedoAreaObject,
-    ComelitVedoZoneObject,
 )
 from aiocomelit.const import (
     BRIDGE,
@@ -32,7 +30,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers import device_registry as dr
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
-from .const import _LOGGER, DOMAIN, SCAN_INTERVAL
+from .const import _LOGGER, DOMAIN, SCAN_INTERVAL, ObjectClassType
 
 type ComelitConfigEntry = ConfigEntry[ComelitBaseCoordinator]
@@ -77,9 +75,7 @@ class ComelitBaseCoordinator(DataUpdateCoordinator[T]):
 
     def platform_device_info(
         self,
-        object_class: ComelitVedoZoneObject
-        | ComelitVedoAreaObject
-        | ComelitSerialBridgeObject,
+        object_class: ObjectClassType,
         object_type: str,
     ) -> dr.DeviceInfo:
         """Set platform device info."""
@@ -12,9 +12,10 @@ from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.restore_state import RestoreEntity
 
+from .const import ObjectClassType
 from .coordinator import ComelitConfigEntry, ComelitSerialBridge
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, bridge_api_call, new_device_listener
+from .utils import bridge_api_call, new_device_listener
 
 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -29,7 +30,7 @@ async def async_setup_entry(
 
     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
+    def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
         """Add entities for new monitors."""
         entities = [
             ComelitCoverEntity(coordinator, device, config_entry.entry_id)
@@ -10,9 +10,10 @@ from homeassistant.components.light import ColorMode, LightEntity
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
+from .const import ObjectClassType
 from .coordinator import ComelitConfigEntry, ComelitSerialBridge
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, bridge_api_call, new_device_listener
+from .utils import bridge_api_call, new_device_listener
 
 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -27,7 +28,7 @@ async def async_setup_entry(
 
     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
+    def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
         """Add entities for new monitors."""
         entities = [
             ComelitLightEntity(coordinator, device, config_entry.entry_id)
@@ -18,9 +18,10 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.typing import StateType
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
+from .const import ObjectClassType
 from .coordinator import ComelitConfigEntry, ComelitSerialBridge, ComelitVedoSystem
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, new_device_listener
+from .utils import new_device_listener
 
 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -66,7 +67,7 @@ async def async_setup_bridge_entry(
 
     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
+    def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
         """Add entities for new monitors."""
         entities = [
             ComelitBridgeSensorEntity(
@@ -93,7 +94,7 @@ async def async_setup_vedo_entry(
 
     coordinator = cast(ComelitVedoSystem, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
+    def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
         """Add entities for new monitors."""
         entities = [
             ComelitVedoSensorEntity(
@@ -11,9 +11,10 @@ from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
+from .const import ObjectClassType
 from .coordinator import ComelitConfigEntry, ComelitSerialBridge
 from .entity import ComelitBridgeBaseEntity
-from .utils import DeviceType, bridge_api_call, new_device_listener
+from .utils import bridge_api_call, new_device_listener
 
 # Coordinator is used to centralize the data updates
 PARALLEL_UPDATES = 0
@@ -28,7 +29,7 @@ async def async_setup_entry(
 
     coordinator = cast(ComelitSerialBridge, config_entry.runtime_data)
 
-    def _add_new_entities(new_devices: list[DeviceType], dev_type: str) -> None:
+    def _add_new_entities(new_devices: list[ObjectClassType], dev_type: str) -> None:
         """Add entities for new monitors."""
         entities = [
             ComelitSwitchEntity(coordinator, device, config_entry.entry_id)
@@ -2,13 +2,9 @@
 
 from collections.abc import Awaitable, Callable, Coroutine
 from functools import wraps
-from typing import Any, Concatenate
+from typing import TYPE_CHECKING, Any, Concatenate
 
-from aiocomelit.api import (
-    ComelitSerialBridgeObject,
-    ComelitVedoAreaObject,
-    ComelitVedoZoneObject,
-)
+from aiocomelit.api import ComelitSerialBridgeObject
 from aiocomelit.exceptions import CannotAuthenticate, CannotConnect, CannotRetrieveData
 from aiohttp import ClientSession, CookieJar
@@ -22,12 +18,10 @@ from homeassistant.helpers import (
     entity_registry as er,
 )
 
-from .const import _LOGGER, DOMAIN
+from .const import _LOGGER, DOMAIN, ObjectClassType
 from .coordinator import ComelitBaseCoordinator
 from .entity import ComelitBridgeBaseEntity
 
-DeviceType = ComelitSerialBridgeObject | ComelitVedoAreaObject | ComelitVedoZoneObject
-
 
 async def async_client_session(hass: HomeAssistant) -> ClientSession:
     """Return a new aiohttp session."""
@@ -126,11 +120,7 @@ def new_device_listener(
     coordinator: ComelitBaseCoordinator,
     new_devices_callback: Callable[
         [
-            list[
-                ComelitSerialBridgeObject
-                | ComelitVedoAreaObject
-                | ComelitVedoZoneObject
-            ],
+            list[ObjectClassType],
             str,
         ],
         None,
@@ -142,10 +132,10 @@ def new_device_listener(
 
     def _check_devices() -> None:
         """Check for new devices and call callback with any new monitors."""
-        if not coordinator.data:
-            return
+        if TYPE_CHECKING:
+            assert coordinator.data
 
-        new_devices: list[DeviceType] = []
+        new_devices: list[ObjectClassType] = []
         for _id in coordinator.data[data_type]:
             if _id not in (id_list := known_devices.get(data_type, [])):
                 known_devices.update({data_type: [*id_list, _id]})
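
The rewritten `_check_devices` keeps a per-data-type list of known IDs and reports only IDs it has not seen before. A self-contained sketch of that bookkeeping (editor's illustration; `find_new_ids` is a hypothetical helper, the integration does this inline against `coordinator.data`):

# Editor's sketch (not part of the PR): the known-devices diffing logic.
def find_new_ids(
    known_devices: dict[str, list[int]], data_type: str, current_ids: list[int]
) -> list[int]:
    """Return IDs not seen before and record them as known."""
    new_ids: list[int] = []
    for _id in current_ids:
        if _id not in (id_list := known_devices.get(data_type, [])):
            known_devices.update({data_type: [*id_list, _id]})
            new_ids.append(_id)
    return new_ids


known: dict[str, list[int]] = {}
assert find_new_ids(known, "lights", [1, 2]) == [1, 2]  # first run: all new
assert find_new_ids(known, "lights", [1, 2, 3]) == [3]  # only the new ID
assert known == {"lights": [1, 2, 3]}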

@@ -4,7 +4,7 @@ from __future__ import annotations
 
 from collections.abc import Callable
 import logging
-from typing import Literal
+from typing import Any, Literal
 
 from hassil.recognize import RecognizeResult
 import voluptuous as vol
@@ -21,6 +21,7 @@ from homeassistant.core import (
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv, intent
 from homeassistant.helpers.entity_component import EntityComponent
+from homeassistant.helpers.reload import async_integration_yaml_config
 from homeassistant.helpers.typing import ConfigType
 from homeassistant.loader import bind_hass
@@ -52,6 +53,8 @@ from .const import (
     DATA_COMPONENT,
     DOMAIN,
     HOME_ASSISTANT_AGENT,
+    METADATA_CUSTOM_FILE,
+    METADATA_CUSTOM_SENTENCE,
     SERVICE_PROCESS,
     SERVICE_RELOAD,
     ConversationEntityFeature,
@@ -266,10 +269,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
     hass.data[DATA_COMPONENT] = entity_component
 
-    agent_config = config.get(DOMAIN, {})
-    await async_setup_default_agent(
-        hass, entity_component, config_intents=agent_config.get("intents", {})
-    )
+    manager = get_agent_manager(hass)
+
+    hass_config_path = hass.config.path()
+    config_intents = _get_config_intents(config, hass_config_path)
+    manager.update_config_intents(config_intents)
+
+    await async_setup_default_agent(hass, entity_component)
 
     async def handle_process(service: ServiceCall) -> ServiceResponse:
         """Parse text into commands."""
@@ -294,9 +300,16 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
 
     async def handle_reload(service: ServiceCall) -> None:
         """Reload intents."""
-        agent = get_agent_manager(hass).default_agent
+        language = service.data.get(ATTR_LANGUAGE)
+        if language is None:
+            conf = await async_integration_yaml_config(hass, DOMAIN)
+            if conf is not None:
+                config_intents = _get_config_intents(conf, hass_config_path)
+                manager.update_config_intents(config_intents)
+
+        agent = manager.default_agent
         if agent is not None:
-            await agent.async_reload(language=service.data.get(ATTR_LANGUAGE))
+            await agent.async_reload(language=language)
 
     hass.services.async_register(
         DOMAIN,
@@ -313,6 +326,27 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     return True
 
 
+def _get_config_intents(config: ConfigType, hass_config_path: str) -> dict[str, Any]:
+    """Return config intents."""
+    intents = config.get(DOMAIN, {}).get("intents", {})
+    return {
+        "intents": {
+            intent_name: {
+                "data": [
+                    {
+                        "sentences": sentences,
+                        "metadata": {
+                            METADATA_CUSTOM_SENTENCE: True,
+                            METADATA_CUSTOM_FILE: hass_config_path,
+                        },
+                    }
+                ]
+            }
+            for intent_name, sentences in intents.items()
+        }
+    }
+
+
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Set up a config entry."""
     return await hass.data[DATA_COMPONENT].async_setup_entry(entry)
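
The new `_get_config_intents` reshapes the flat `intents:` mapping from configuration.yaml into the nested structure the default agent later merges via `merge_dict`, tagging each sentence list with the custom-sentence metadata. A standalone mirror of that transformation (editor's illustration; the string keys match the constants added to const.py):

# Editor's sketch (not part of the PR): the shape _get_config_intents produces.
def get_config_intents(config: dict, hass_config_path: str) -> dict:
    """Standalone mirror of _get_config_intents for illustration."""
    intents = config.get("conversation", {}).get("intents", {})  # DOMAIN
    return {
        "intents": {
            intent_name: {
                "data": [
                    {
                        "sentences": sentences,
                        "metadata": {
                            "hass_custom_sentence": True,  # METADATA_CUSTOM_SENTENCE
                            "hass_custom_file": hass_config_path,  # METADATA_CUSTOM_FILE
                        },
                    }
                ]
            }
            for intent_name, sentences in intents.items()
        }
    }


result = get_config_intents(
    {"conversation": {"intents": {"TellJoke": ["Tell me a joke"]}}}, "/config"
)
assert result["intents"]["TellJoke"]["data"][0]["sentences"] == ["Tell me a joke"]
assert result["intents"]["TellJoke"]["data"][0]["metadata"]["hass_custom_sentence"]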

@@ -147,6 +147,7 @@ class AgentManager:
         self.hass = hass
         self._agents: dict[str, AbstractConversationAgent] = {}
         self.default_agent: DefaultAgent | None = None
+        self.config_intents: dict[str, Any] = {}
         self.triggers_details: list[TriggerDetails] = []
 
     @callback
@@ -199,9 +200,16 @@ class AgentManager:
 
     async def async_setup_default_agent(self, agent: DefaultAgent) -> None:
         """Set up the default agent."""
+        agent.update_config_intents(self.config_intents)
         agent.update_triggers(self.triggers_details)
         self.default_agent = agent
 
+    def update_config_intents(self, intents: dict[str, Any]) -> None:
+        """Update config intents."""
+        self.config_intents = intents
+        if self.default_agent is not None:
+            self.default_agent.update_config_intents(intents)
+
     def register_trigger(self, trigger_details: TriggerDetails) -> CALLBACK_TYPE:
         """Register a trigger."""
         self.triggers_details.append(trigger_details)
@@ -30,3 +30,7 @@ class ConversationEntityFeature(IntFlag):
     """Supported features of the conversation entity."""
 
     CONTROL = 1
+
+
+METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
+METADATA_CUSTOM_FILE = "hass_custom_file"
@@ -77,7 +77,12 @@ from homeassistant.util.json import JsonObjectType, json_loads_object
 
 from .agent_manager import get_agent_manager
 from .chat_log import AssistantContent, ChatLog
-from .const import DOMAIN, ConversationEntityFeature
+from .const import (
+    DOMAIN,
+    METADATA_CUSTOM_FILE,
+    METADATA_CUSTOM_SENTENCE,
+    ConversationEntityFeature,
+)
 from .entity import ConversationEntity
 from .models import ConversationInput, ConversationResult
 from .trace import ConversationTraceEventType, async_conversation_trace_append
@@ -91,8 +96,6 @@ _ENTITY_REGISTRY_UPDATE_FIELDS = ["aliases", "name", "original_name"]
 
 _DEFAULT_EXPOSED_ATTRIBUTES = {"device_class"}
 
-METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
-METADATA_CUSTOM_FILE = "hass_custom_file"
 METADATA_FUZZY_MATCH = "hass_fuzzy_match"
 
 ERROR_SENTINEL = object()
@@ -202,10 +205,9 @@ class IntentCache:
 async def async_setup_default_agent(
     hass: HomeAssistant,
     entity_component: EntityComponent[ConversationEntity],
-    config_intents: dict[str, Any],
 ) -> None:
     """Set up entity registry listener for the default agent."""
-    agent = DefaultAgent(hass, config_intents)
+    agent = DefaultAgent(hass)
     await entity_component.async_add_entities([agent])
     await get_agent_manager(hass).async_setup_default_agent(agent)
@@ -230,14 +232,14 @@ class DefaultAgent(ConversationEntity):
     _attr_name = "Home Assistant"
     _attr_supported_features = ConversationEntityFeature.CONTROL
 
-    def __init__(self, hass: HomeAssistant, config_intents: dict[str, Any]) -> None:
+    def __init__(self, hass: HomeAssistant) -> None:
         """Initialize the default agent."""
         self.hass = hass
         self._lang_intents: dict[str, LanguageIntents | object] = {}
         self._load_intents_lock = asyncio.Lock()
 
-        # intent -> [sentences]
-        self._config_intents: dict[str, Any] = config_intents
+        # Intents from common conversation config
+        self._config_intents: dict[str, Any] = {}
 
         # Sentences that will trigger a callback (skipping intent recognition)
         self._triggers_details: list[TriggerDetails] = []
@@ -1035,6 +1037,14 @@ class DefaultAgent(ConversationEntity):
         # Intents have changed, so we must clear the cache
         self._intent_cache.clear()
 
+    @callback
+    def update_config_intents(self, intents: dict[str, Any]) -> None:
+        """Update config intents."""
+        self._config_intents = intents
+
+        # Intents have changed, so we must clear the cache
+        self._intent_cache.clear()
+
     async def async_prepare(self, language: str | None = None) -> None:
         """Load intents for a language."""
         if language is None:
@@ -1159,33 +1169,10 @@ class DefaultAgent(ConversationEntity):
             custom_sentences_path,
         )
 
-        # Load sentences from HA config for default language only
-        if self._config_intents and (
-            self.hass.config.language in (language, language_variant)
-        ):
-            hass_config_path = self.hass.config.path()
-            merge_dict(
-                intents_dict,
-                {
-                    "intents": {
-                        intent_name: {
-                            "data": [
-                                {
-                                    "sentences": sentences,
-                                    "metadata": {
-                                        METADATA_CUSTOM_SENTENCE: True,
-                                        METADATA_CUSTOM_FILE: hass_config_path,
-                                    },
-                                }
-                            ]
-                        }
-                        for intent_name, sentences in self._config_intents.items()
-                    }
-                },
-            )
-            _LOGGER.debug(
-                "Loaded intents from configuration.yaml",
-            )
+        merge_dict(
+            intents_dict,
+            self._config_intents,
+        )
 
         if not intents_dict:
             return None
@@ -6,3 +6,5 @@ DEFAULT_PORT = 10102
 
 CONF_SUPPORTED_MODES = "supported_modes"
 CONF_SWING_SUPPORT = "swing_support"
+MAX_RETRIES = 3
+BACKOFF_BASE_DELAY = 2
@@ -2,6 +2,7 @@
 
 from __future__ import annotations
 
+import asyncio
 import logging
 
 from pycoolmasternet_async import CoolMasterNet
@@ -12,7 +13,7 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
-from .const import DOMAIN
+from .const import BACKOFF_BASE_DELAY, DOMAIN, MAX_RETRIES
 
 _LOGGER = logging.getLogger(__name__)
@@ -46,7 +47,34 @@ class CoolmasterDataUpdateCoordinator(
 
     async def _async_update_data(self) -> dict[str, CoolMasterNetUnit]:
         """Fetch data from Coolmaster."""
-        try:
-            return await self._coolmaster.status()
-        except OSError as error:
-            raise UpdateFailed from error
+        retries_left = MAX_RETRIES
+        status: dict[str, CoolMasterNetUnit] = {}
+        while retries_left > 0 and not status:
+            retries_left -= 1
+            try:
+                status = await self._coolmaster.status()
+            except OSError as error:
+                if retries_left == 0:
+                    raise UpdateFailed(
+                        f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): {error}"
+                    ) from error
+                _LOGGER.debug(
+                    "Error communicating with coolmaster (%d retries left): %s",
+                    retries_left,
+                    str(error),
+                )
+            else:
+                if status:
+                    return status
+
+                _LOGGER.debug(
+                    "Error communicating with coolmaster: empty status received (%d retries left)",
+                    retries_left,
+                )
+
+            backoff = BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left)
+            await asyncio.sleep(backoff)
+
+        raise UpdateFailed(
+            f"Error communicating with Coolmaster (aborting after {MAX_RETRIES} retries): empty status received"
+        )
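
With the constants added above (`MAX_RETRIES = 3`, `BACKOFF_BASE_DELAY = 2`), the sleep after each failed attempt is `BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left)`, giving a 2 s, 4 s, 8 s schedule; note that a final `OSError` raises before the last sleep, so only the empty-status path sleeps all three times. A quick check of the schedule (editor's sketch, not part of the PR):

# Editor's sketch: the backoff delays implied by the retry loop above.
MAX_RETRIES = 3
BACKOFF_BASE_DELAY = 2

delays = []
retries_left = MAX_RETRIES
while retries_left > 0:
    retries_left -= 1  # mirrors the decrement at the top of the loop
    delays.append(BACKOFF_BASE_DELAY ** (MAX_RETRIES - retries_left))

assert delays == [2, 4, 8]  # seconds slept after attempts 1, 2 and 3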

@@ -136,5 +136,75 @@
       "name": "Toggle tilt"
     }
   },
-  "title": "Cover"
+  "title": "Cover",
+  "triggers": {
+    "opens": {
+      "description": "Triggers when a cover opens.",
+      "description_configured": "Triggers when a cover opens",
+      "fields": {
+        "fully_opened": {
+          "description": "Only trigger when the cover is fully opened (position at 100%).",
+          "name": "Fully opened"
+        },
+        "device_class": {
+          "description": "The device classes to trigger on. If empty, triggers on all device classes.",
+          "name": "Device classes"
+        }
+      },
+      "name": "When a cover opens"
+    },
+    "closes": {
+      "description": "Triggers when a cover closes.",
+      "description_configured": "Triggers when a cover closes",
+      "fields": {
+        "fully_closed": {
+          "description": "Only trigger when the cover is fully closed (position at 0%).",
+          "name": "Fully closed"
+        },
+        "device_class": {
+          "description": "The device classes to trigger on. If empty, triggers on all device classes.",
+          "name": "Device classes"
+        }
+      },
+      "name": "When a cover closes"
+    },
+    "stops": {
+      "description": "Triggers when a cover stops moving.",
+      "description_configured": "Triggers when a cover stops moving",
+      "fields": {
+        "device_class": {
+          "description": "The device classes to trigger on. If empty, triggers on all device classes.",
+          "name": "Device classes"
+        }
+      },
+      "name": "When a cover stops moving"
+    },
+    "position_changed": {
+      "description": "Triggers when the position of a cover changes.",
+      "description_configured": "Triggers when the position of a cover changes",
+      "fields": {
+        "lower": {
+          "description": "The minimum position value to trigger on. Only triggers when position is at or above this value.",
+          "name": "Lower limit"
+        },
+        "upper": {
+          "description": "The maximum position value to trigger on. Only triggers when position is at or below this value.",
+          "name": "Upper limit"
+        },
+        "above": {
+          "description": "Only trigger when position is above this value.",
+          "name": "Above"
+        },
+        "below": {
+          "description": "Only trigger when position is below this value.",
+          "name": "Below"
+        },
+        "device_class": {
+          "description": "The device classes to trigger on. If empty, triggers on all device classes.",
+          "name": "Device classes"
+        }
+      },
+      "name": "When the position of a cover changes"
+    }
+  }
 }

homeassistant/components/cover/trigger.py (new file, 453 lines)
@@ -0,0 +1,453 @@
"""Provides triggers for covers."""

from typing import TYPE_CHECKING, cast, override

import voluptuous as vol

from homeassistant.const import (
    ATTR_ENTITY_ID,
    CONF_DEVICE_CLASS,
    CONF_OPTIONS,
    CONF_TARGET,
    STATE_UNAVAILABLE,
)
from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback, split_entity_id
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.target import (
    TargetStateChangedData,
    async_track_target_selector_state_change_event,
)
from homeassistant.helpers.trigger import Trigger, TriggerActionRunner, TriggerConfig
from homeassistant.helpers.typing import ConfigType

from . import ATTR_CURRENT_POSITION, CoverDeviceClass, CoverState
from .const import DOMAIN

CONF_LOWER = "lower"
CONF_UPPER = "upper"
CONF_ABOVE = "above"
CONF_BELOW = "below"
CONF_FULLY_OPENED = "fully_opened"
CONF_FULLY_CLOSED = "fully_closed"

OPENS_TRIGGER_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_OPTIONS, default={}): {
            vol.Optional(CONF_FULLY_OPENED, default=False): cv.boolean,
            vol.Optional(CONF_DEVICE_CLASS, default=[]): vol.All(
                cv.ensure_list, [vol.Coerce(CoverDeviceClass)]
            ),
        },
        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
    }
)

CLOSES_TRIGGER_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_OPTIONS, default={}): {
            vol.Optional(CONF_FULLY_CLOSED, default=False): cv.boolean,
            vol.Optional(CONF_DEVICE_CLASS, default=[]): vol.All(
                cv.ensure_list, [vol.Coerce(CoverDeviceClass)]
            ),
        },
        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
    }
)

STOPS_TRIGGER_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_OPTIONS, default={}): {
            vol.Optional(CONF_DEVICE_CLASS, default=[]): vol.All(
                cv.ensure_list, [vol.Coerce(CoverDeviceClass)]
            ),
        },
        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
    }
)

POSITION_CHANGED_TRIGGER_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_OPTIONS, default={}): {
            vol.Exclusive(CONF_LOWER, "position_range"): vol.All(
                vol.Coerce(int), vol.Range(min=0, max=100)
            ),
            vol.Exclusive(CONF_UPPER, "position_range"): vol.All(
                vol.Coerce(int), vol.Range(min=0, max=100)
            ),
            vol.Exclusive(CONF_ABOVE, "position_range"): vol.All(
                vol.Coerce(int), vol.Range(min=0, max=100)
            ),
            vol.Exclusive(CONF_BELOW, "position_range"): vol.All(
                vol.Coerce(int), vol.Range(min=0, max=100)
            ),
            vol.Optional(CONF_DEVICE_CLASS, default=[]): vol.All(
                cv.ensure_list, [vol.Coerce(CoverDeviceClass)]
            ),
        },
        vol.Required(CONF_TARGET): cv.TARGET_FIELDS,
    }
)
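
# Editor's note (not part of the PR): the four position bounds above sit in the
# same vol.Exclusive group "position_range", so a config may set at most one of
# lower/upper/above/below. A toy schema demonstrating that voluptuous behavior,
# reusing the `vol` import at the top of this module:
_sketch_schema = vol.Schema(
    {
        vol.Exclusive("above", "position_range"): int,
        vol.Exclusive("below", "position_range"): int,
    }
)
_sketch_schema({"above": 50})  # one key from the group validates
try:
    _sketch_schema({"above": 50, "below": 80})  # two keys from the same group
except vol.MultipleInvalid:
    pass  # voluptuous rejects combined Exclusive keys
else:
    raise AssertionError("Exclusive keys must not be combined")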


class CoverOpensTrigger(Trigger):
    """Trigger for when a cover opens."""

    @override
    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, OPENS_TRIGGER_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the cover opens trigger."""
        super().__init__(hass, config)
        if TYPE_CHECKING:
            assert config.target is not None
            assert config.options is not None
        self._target = config.target
        self._options = config.options

    @override
    async def async_attach_runner(
        self, run_action: TriggerActionRunner
    ) -> CALLBACK_TYPE:
        """Attach the trigger to an action runner."""
        fully_opened = self._options[CONF_FULLY_OPENED]
        device_classes_filter = self._options[CONF_DEVICE_CLASS]

        @callback
        def state_change_listener(
            target_state_change_data: TargetStateChangedData,
        ) -> None:
            """Listen for state changes and call action."""
            event = target_state_change_data.state_change_event
            entity_id = event.data["entity_id"]
            from_state = event.data["old_state"]
            to_state = event.data["new_state"]

            # Ignore unavailable states
            if to_state is None or to_state.state == STATE_UNAVAILABLE:
                return

            # Filter by device class if specified
            if device_classes_filter:
                device_class = to_state.attributes.get(CONF_DEVICE_CLASS)
                if device_class not in device_classes_filter:
                    return

            # Trigger when cover opens or is opening
            if to_state.state in (CoverState.OPEN, CoverState.OPENING):
                # If fully_opened is True, only trigger when position reaches 100
                if fully_opened:
                    current_position = to_state.attributes.get(ATTR_CURRENT_POSITION)
                    if current_position != 100:
                        return

                # Only trigger on state change, not if already in that state
                if from_state and from_state.state == to_state.state:
                    # For fully_opened, allow triggering when position changes to 100
                    if fully_opened:
                        from_position = from_state.attributes.get(ATTR_CURRENT_POSITION)
                        to_position = to_state.attributes.get(ATTR_CURRENT_POSITION)
                        if from_position == to_position:
                            return
                    else:
                        return

                run_action(
                    {
                        ATTR_ENTITY_ID: entity_id,
                        "from_state": from_state,
                        "to_state": to_state,
                    },
                    f"cover opened on {entity_id}",
                    event.context,
                )

        def entity_filter(entities: set[str]) -> set[str]:
            """Filter entities of this domain."""
            return {
                entity_id
                for entity_id in entities
                if split_entity_id(entity_id)[0] == DOMAIN
            }

        return async_track_target_selector_state_change_event(
            self._hass, self._target, state_change_listener, entity_filter
        )
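
# Editor's note (not part of the PR): the fully_opened branch above has a subtle
# edge case: if the state stays "open" but the reported position moves to 100,
# the trigger still fires; an unchanged state with an unchanged position does
# not. Condensed, standalone version of that decision (plain strings and ints
# stand in for HA State objects; `_opens_fires` is hypothetical):
def _opens_fires(
    from_state: str | None,
    to_state: str,
    from_pos: int | None,
    to_pos: int | None,
) -> bool:
    """Mirror CoverOpensTrigger's decision for fully_opened=True."""
    if to_state not in ("open", "opening"):
        return False
    if to_pos != 100:
        return False  # fully_opened requires position 100
    if from_state == to_state and from_pos == to_pos:
        return False  # nothing actually changed
    return True


assert _opens_fires("opening", "open", 80, 100)  # state flip, fully open
assert _opens_fires("open", "open", 90, 100)  # same state, position hit 100
assert not _opens_fires("open", "open", 100, 100)  # no change at all
assert not _opens_fires("closed", "opening", 0, 50)  # not yet fully open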
|
||||
|
||||
|
||||
class CoverClosesTrigger(Trigger):
|
||||
"""Trigger for when a cover closes."""
|
||||
|
||||
@override
|
||||
@classmethod
|
||||
async def async_validate_config(
|
||||
cls, hass: HomeAssistant, config: ConfigType
|
||||
) -> ConfigType:
|
||||
"""Validate config."""
|
||||
return cast(ConfigType, CLOSES_TRIGGER_SCHEMA(config))
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize the cover closes trigger."""
|
||||
super().__init__(hass, config)
|
||||
if TYPE_CHECKING:
|
||||
assert config.target is not None
|
||||
assert config.options is not None
|
||||
self._target = config.target
|
||||
self._options = config.options
|
||||
|
||||
@override
|
||||
async def async_attach_runner(
|
||||
self, run_action: TriggerActionRunner
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Attach the trigger to an action runner."""
|
||||
fully_closed = self._options[CONF_FULLY_CLOSED]
|
||||
device_classes_filter = self._options[CONF_DEVICE_CLASS]
|
||||
|
||||
@callback
|
||||
def state_change_listener(
|
||||
target_state_change_data: TargetStateChangedData,
|
||||
) -> None:
|
||||
"""Listen for state changes and call action."""
|
||||
event = target_state_change_data.state_change_event
|
||||
entity_id = event.data["entity_id"]
|
||||
from_state = event.data["old_state"]
|
||||
to_state = event.data["new_state"]
|
||||
|
||||
# Ignore unavailable states
|
||||
if to_state is None or to_state.state == STATE_UNAVAILABLE:
|
||||
return
|
||||
|
||||
# Filter by device class if specified
|
||||
if device_classes_filter:
|
||||
device_class = to_state.attributes.get(CONF_DEVICE_CLASS)
|
||||
if device_class not in device_classes_filter:
|
||||
return
|
||||
|
||||
# Trigger when cover closes or is closing
|
||||
if to_state.state in (CoverState.CLOSED, CoverState.CLOSING):
|
||||
# If fully_closed is True, only trigger when position reaches 0
|
||||
if fully_closed:
|
||||
current_position = to_state.attributes.get(ATTR_CURRENT_POSITION)
|
||||
if current_position != 0:
|
||||
return
|
||||
|
||||
# Only trigger on state change, not if already in that state
|
||||
if from_state and from_state.state == to_state.state:
|
||||
# For fully_closed, allow triggering when position changes to 0
|
||||
if fully_closed:
|
||||
from_position = from_state.attributes.get(ATTR_CURRENT_POSITION)
|
||||
to_position = to_state.attributes.get(ATTR_CURRENT_POSITION)
|
||||
if from_position == to_position:
|
||||
return
|
||||
else:
|
||||
return
|
||||
|
||||
run_action(
|
||||
{
|
||||
ATTR_ENTITY_ID: entity_id,
|
||||
"from_state": from_state,
|
||||
"to_state": to_state,
|
||||
},
|
||||
f"cover closed on {entity_id}",
|
||||
event.context,
|
||||
)
|
||||
|
||||
def entity_filter(entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if split_entity_id(entity_id)[0] == DOMAIN
|
||||
}
|
||||
|
||||
return async_track_target_selector_state_change_event(
|
||||
self._hass, self._target, state_change_listener, entity_filter
|
||||
)
|
||||
|
||||
|
||||
class CoverStopsTrigger(Trigger):
    """Trigger for when a cover stops moving."""

    @override
    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, STOPS_TRIGGER_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the cover stops trigger."""
        super().__init__(hass, config)
        if TYPE_CHECKING:
            assert config.target is not None
            assert config.options is not None
        self._target = config.target
        self._options = config.options

    @override
    async def async_attach_runner(
        self, run_action: TriggerActionRunner
    ) -> CALLBACK_TYPE:
        """Attach the trigger to an action runner."""
        device_classes_filter = self._options[CONF_DEVICE_CLASS]

        @callback
        def state_change_listener(
            target_state_change_data: TargetStateChangedData,
        ) -> None:
            """Listen for state changes and call action."""
            event = target_state_change_data.state_change_event
            entity_id = event.data["entity_id"]
            from_state = event.data["old_state"]
            to_state = event.data["new_state"]

            # Ignore unavailable states
            if to_state is None or to_state.state == STATE_UNAVAILABLE:
                return

            # Filter by device class if specified
            if device_classes_filter:
                device_class = to_state.attributes.get(CONF_DEVICE_CLASS)
                if device_class not in device_classes_filter:
                    return

            # Trigger when cover stops (from opening/closing to open/closed)
            if from_state and from_state.state in (
                CoverState.OPENING,
                CoverState.CLOSING,
            ):
                if to_state.state in (CoverState.OPEN, CoverState.CLOSED):
                    run_action(
                        {
                            ATTR_ENTITY_ID: entity_id,
                            "from_state": from_state,
                            "to_state": to_state,
                        },
                        f"cover stopped on {entity_id}",
                        event.context,
                    )

        def entity_filter(entities: set[str]) -> set[str]:
            """Filter entities of this domain."""
            return {
                entity_id
                for entity_id in entities
                if split_entity_id(entity_id)[0] == DOMAIN
            }

        return async_track_target_selector_state_change_event(
            self._hass, self._target, state_change_listener, entity_filter
        )

class CoverPositionChangedTrigger(Trigger):
    """Trigger for when a cover's position changes."""

    @override
    @classmethod
    async def async_validate_config(
        cls, hass: HomeAssistant, config: ConfigType
    ) -> ConfigType:
        """Validate config."""
        return cast(ConfigType, POSITION_CHANGED_TRIGGER_SCHEMA(config))

    def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
        """Initialize the cover position changed trigger."""
        super().__init__(hass, config)
        if TYPE_CHECKING:
            assert config.target is not None
        self._target = config.target
        self._options = config.options or {}

    @override
    async def async_attach_runner(
        self, run_action: TriggerActionRunner
    ) -> CALLBACK_TYPE:
        """Attach the trigger to an action runner."""
        lower_limit = self._options.get(CONF_LOWER)
        upper_limit = self._options.get(CONF_UPPER)
        above_limit = self._options.get(CONF_ABOVE)
        below_limit = self._options.get(CONF_BELOW)
        device_classes_filter = self._options.get(CONF_DEVICE_CLASS, [])

        @callback
        def state_change_listener(
            target_state_change_data: TargetStateChangedData,
        ) -> None:
            """Listen for state changes and call action."""
            event = target_state_change_data.state_change_event
            entity_id = event.data["entity_id"]
            from_state = event.data["old_state"]
            to_state = event.data["new_state"]

            # Ignore unavailable states
            if to_state is None or to_state.state == STATE_UNAVAILABLE:
                return

            # Filter by device class if specified
            if device_classes_filter:
                device_class = to_state.attributes.get(CONF_DEVICE_CLASS)
                if device_class not in device_classes_filter:
                    return

            # Get position values
            from_position = (
                from_state.attributes.get(ATTR_CURRENT_POSITION) if from_state else None
            )
            to_position = to_state.attributes.get(ATTR_CURRENT_POSITION)

            # Only trigger if position value exists and has changed
            if to_position is None or from_position == to_position:
                return

            # Apply threshold filters if configured
            if lower_limit is not None and to_position < lower_limit:
                return
            if upper_limit is not None and to_position > upper_limit:
                return
            if above_limit is not None and to_position <= above_limit:
                return
            if below_limit is not None and to_position >= below_limit:
                return

            run_action(
                {
                    ATTR_ENTITY_ID: entity_id,
                    "from_state": from_state,
                    "to_state": to_state,
                    "from_position": from_position,
                    "to_position": to_position,
                },
                f"position changed on {entity_id}",
                event.context,
            )

        def entity_filter(entities: set[str]) -> set[str]:
            """Filter entities of this domain."""
            return {
                entity_id
                for entity_id in entities
                if split_entity_id(entity_id)[0] == DOMAIN
            }

        return async_track_target_selector_state_change_event(
            self._hass, self._target, state_change_listener, entity_filter
        )


TRIGGERS: dict[str, type[Trigger]] = {
    "opens": CoverOpensTrigger,
    "closes": CoverClosesTrigger,
    "stops": CoverStopsTrigger,
    "position_changed": CoverPositionChangedTrigger,
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for covers."""
    return TRIGGERS
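
The four position thresholds in the listener above compose as a band plus strict bounds: lower and upper are inclusive limits, while above and below are strict. A quick standalone check of those semantics (plain Python, illustration only, not part of the diff):

def passes_filters(pos, lower=None, upper=None, above=None, below=None):
    """Return True if a new position survives the threshold filters."""
    if lower is not None and pos < lower:
        return False
    if upper is not None and pos > upper:
        return False
    if above is not None and pos <= above:
        return False
    if below is not None and pos >= below:
        return False
    return True

assert passes_filters(50, lower=50)      # lower bound is inclusive
assert not passes_filters(50, above=50)  # above is strictly greater-than
assert passes_filters(30, lower=20, upper=40)
assert not passes_filters(45, lower=20, upper=40)
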
101
homeassistant/components/cover/triggers.yaml
Normal file
@@ -0,0 +1,101 @@
opens:
  target:
    entity:
      domain: cover
  fields:
    fully_opened:
      required: false
      default: false
      selector:
        boolean:
    device_class:
      required: false
      default: []
      selector:
        select:
          multiple: true
          options:
            - curtain
            - shutter
            - blind

closes:
  target:
    entity:
      domain: cover
  fields:
    fully_closed:
      required: false
      default: false
      selector:
        boolean:
    device_class:
      required: false
      default: []
      selector:
        select:
          multiple: true
          options:
            - curtain
            - shutter
            - blind

stops:
  target:
    entity:
      domain: cover
  fields:
    device_class:
      required: false
      default: []
      selector:
        select:
          multiple: true
          options:
            - curtain
            - shutter
            - blind

position_changed:
  target:
    entity:
      domain: cover
  fields:
    lower:
      required: false
      selector:
        number:
          min: 0
          max: 100
          mode: box
    upper:
      required: false
      selector:
        number:
          min: 0
          max: 100
          mode: box
    above:
      required: false
      selector:
        number:
          min: 0
          max: 100
          mode: box
    below:
      required: false
      selector:
        number:
          min: 0
          max: 100
          mode: box
    device_class:
      required: false
      default: []
      selector:
        select:
          multiple: true
          options:
            - curtain
            - shutter
            - blind
@@ -1,4 +0,0 @@
"""The cups component."""
|
||||
|
||||
DOMAIN = "cups"
|
||||
CONF_PRINTERS = "printers"
|
||||
@@ -1,9 +0,0 @@
{
  "domain": "cups",
  "name": "CUPS",
  "codeowners": ["@fabaff"],
  "documentation": "https://www.home-assistant.io/integrations/cups",
  "iot_class": "local_polling",
  "quality_scale": "legacy",
  "requirements": ["pycups==2.0.4"]
}
@@ -1,349 +0,0 @@
"""Details about printers which are connected to CUPS."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import importlib
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
SensorEntity,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_PORT, PERCENTAGE
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
|
||||
from homeassistant.exceptions import PlatformNotReady
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import CONF_PRINTERS, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_MARKER_TYPE = "marker_type"
|
||||
ATTR_MARKER_LOW_LEVEL = "marker_low_level"
|
||||
ATTR_MARKER_HIGH_LEVEL = "marker_high_level"
|
||||
ATTR_PRINTER_NAME = "printer_name"
|
||||
ATTR_DEVICE_URI = "device_uri"
|
||||
ATTR_PRINTER_INFO = "printer_info"
|
||||
ATTR_PRINTER_IS_SHARED = "printer_is_shared"
|
||||
ATTR_PRINTER_LOCATION = "printer_location"
|
||||
ATTR_PRINTER_MODEL = "printer_model"
|
||||
ATTR_PRINTER_STATE_MESSAGE = "printer_state_message"
|
||||
ATTR_PRINTER_STATE_REASON = "printer_state_reason"
|
||||
ATTR_PRINTER_TYPE = "printer_type"
|
||||
ATTR_PRINTER_URI_SUPPORTED = "printer_uri_supported"
|
||||
|
||||
CONF_IS_CUPS_SERVER = "is_cups_server"
|
||||
|
||||
DEFAULT_HOST = "127.0.0.1"
|
||||
DEFAULT_PORT = 631
|
||||
DEFAULT_IS_CUPS_SERVER = True
|
||||
|
||||
ICON_PRINTER = "mdi:printer"
|
||||
ICON_MARKER = "mdi:water"
|
||||
|
||||
SCAN_INTERVAL = timedelta(minutes=1)
|
||||
|
||||
PRINTER_STATES = {3: "idle", 4: "printing", 5: "stopped"}
|
||||
|
||||
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_PRINTERS): vol.All(cv.ensure_list, [cv.string]),
|
||||
vol.Optional(CONF_IS_CUPS_SERVER, default=DEFAULT_IS_CUPS_SERVER): cv.boolean,
|
||||
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
|
||||
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the CUPS sensor."""
|
||||
host: str = config[CONF_HOST]
|
||||
port: int = config[CONF_PORT]
|
||||
printers: list[str] = config[CONF_PRINTERS]
|
||||
is_cups: bool = config[CONF_IS_CUPS_SERVER]
|
||||
|
||||
create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
|
||||
breaks_in_ha_version="2025.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_system_packages_yaml_integration",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "CUPS",
|
||||
},
|
||||
)
|
||||
|
||||
if is_cups:
|
||||
data = CupsData(host, port, None)
|
||||
data.update()
|
||||
if data.available is False:
|
||||
_LOGGER.error("Unable to connect to CUPS server: %s:%s", host, port)
|
||||
raise PlatformNotReady
|
||||
assert data.printers is not None
|
||||
|
||||
dev: list[SensorEntity] = []
|
||||
for printer in printers:
|
||||
if printer not in data.printers:
|
||||
_LOGGER.error("Printer is not present: %s", printer)
|
||||
continue
|
||||
dev.append(CupsSensor(data, printer))
|
||||
|
||||
if "marker-names" in data.attributes[printer]:
|
||||
dev.extend(
|
||||
MarkerSensor(data, printer, marker, True)
|
||||
for marker in data.attributes[printer]["marker-names"]
|
||||
)
|
||||
|
||||
add_entities(dev, True)
|
||||
return
|
||||
|
||||
data = CupsData(host, port, printers)
|
||||
data.update()
|
||||
if data.available is False:
|
||||
_LOGGER.error("Unable to connect to IPP printer: %s:%s", host, port)
|
||||
raise PlatformNotReady
|
||||
|
||||
dev = []
|
||||
for printer in printers:
|
||||
dev.append(IPPSensor(data, printer))
|
||||
|
||||
if "marker-names" in data.attributes[printer]:
|
||||
for marker in data.attributes[printer]["marker-names"]:
|
||||
dev.append(MarkerSensor(data, printer, marker, False))
|
||||
|
||||
add_entities(dev, True)
|
||||
|
||||
|
||||
class CupsSensor(SensorEntity):
|
||||
"""Representation of a CUPS sensor."""
|
||||
|
||||
_attr_icon = ICON_PRINTER
|
||||
|
||||
def __init__(self, data: CupsData, printer_name: str) -> None:
|
||||
"""Initialize the CUPS sensor."""
|
||||
self.data = data
|
||||
self._name = printer_name
|
||||
self._printer: dict[str, Any] | None = None
|
||||
self._attr_available = False
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
"""Return the name of the entity."""
|
||||
return self._name
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
if self._printer is None:
|
||||
return None
|
||||
|
||||
key = self._printer["printer-state"]
|
||||
return PRINTER_STATES.get(key, key)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
"""Return the state attributes of the sensor."""
|
||||
if self._printer is None:
|
||||
return None
|
||||
|
||||
return {
|
||||
ATTR_DEVICE_URI: self._printer["device-uri"],
|
||||
ATTR_PRINTER_INFO: self._printer["printer-info"],
|
||||
ATTR_PRINTER_IS_SHARED: self._printer["printer-is-shared"],
|
||||
ATTR_PRINTER_LOCATION: self._printer["printer-location"],
|
||||
ATTR_PRINTER_MODEL: self._printer["printer-make-and-model"],
|
||||
ATTR_PRINTER_STATE_MESSAGE: self._printer["printer-state-message"],
|
||||
ATTR_PRINTER_STATE_REASON: self._printer["printer-state-reasons"],
|
||||
ATTR_PRINTER_TYPE: self._printer["printer-type"],
|
||||
ATTR_PRINTER_URI_SUPPORTED: self._printer["printer-uri-supported"],
|
||||
}
|
||||
|
||||
def update(self) -> None:
|
||||
"""Get the latest data and updates the states."""
|
||||
self.data.update()
|
||||
assert self.data.printers is not None
|
||||
self._printer = self.data.printers.get(self.name)
|
||||
self._attr_available = self.data.available
|
||||
|
||||
|
||||
class IPPSensor(SensorEntity):
|
||||
"""Implementation of the IPPSensor.
|
||||
|
||||
This sensor represents the status of the printer.
|
||||
"""
|
||||
|
||||
_attr_icon = ICON_PRINTER
|
||||
|
||||
def __init__(self, data: CupsData, printer_name: str) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self.data = data
|
||||
self._printer_name = printer_name
|
||||
self._attributes = None
|
||||
self._attr_available = False
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Return the name of the sensor."""
|
||||
return self._attributes["printer-make-and-model"]
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
if self._attributes is None:
|
||||
return None
|
||||
|
||||
key = self._attributes["printer-state"]
|
||||
return PRINTER_STATES.get(key, key)
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
"""Return the state attributes of the sensor."""
|
||||
if self._attributes is None:
|
||||
return None
|
||||
|
||||
state_attributes = {}
|
||||
|
||||
if "printer-info" in self._attributes:
|
||||
state_attributes[ATTR_PRINTER_INFO] = self._attributes["printer-info"]
|
||||
|
||||
if "printer-location" in self._attributes:
|
||||
state_attributes[ATTR_PRINTER_LOCATION] = self._attributes[
|
||||
"printer-location"
|
||||
]
|
||||
|
||||
if "printer-state-message" in self._attributes:
|
||||
state_attributes[ATTR_PRINTER_STATE_MESSAGE] = self._attributes[
|
||||
"printer-state-message"
|
||||
]
|
||||
|
||||
if "printer-state-reasons" in self._attributes:
|
||||
state_attributes[ATTR_PRINTER_STATE_REASON] = self._attributes[
|
||||
"printer-state-reasons"
|
||||
]
|
||||
|
||||
if "printer-uri-supported" in self._attributes:
|
||||
state_attributes[ATTR_PRINTER_URI_SUPPORTED] = self._attributes[
|
||||
"printer-uri-supported"
|
||||
]
|
||||
|
||||
return state_attributes
|
||||
|
||||
def update(self) -> None:
|
||||
"""Fetch new state data for the sensor."""
|
||||
self.data.update()
|
||||
self._attributes = self.data.attributes.get(self._printer_name)
|
||||
self._attr_available = self.data.available
|
||||
|
||||
|
||||
class MarkerSensor(SensorEntity):
|
||||
"""Implementation of the MarkerSensor.
|
||||
|
||||
This sensor represents the percentage of ink or toner.
|
||||
"""
|
||||
|
||||
_attr_icon = ICON_MARKER
|
||||
_attr_native_unit_of_measurement = PERCENTAGE
|
||||
|
||||
def __init__(self, data: CupsData, printer: str, name: str, is_cups: bool) -> None:
|
||||
"""Initialize the sensor."""
|
||||
self.data = data
|
||||
self._attr_name = name
|
||||
self._printer = printer
|
||||
self._index = data.attributes[printer]["marker-names"].index(name)
|
||||
self._is_cups = is_cups
|
||||
self._attributes: dict[str, Any] | None = None
|
||||
|
||||
@property
|
||||
def native_value(self):
|
||||
"""Return the state of the sensor."""
|
||||
if self._attributes is None:
|
||||
return None
|
||||
|
||||
return self._attributes[self._printer]["marker-levels"][self._index]
|
||||
|
||||
@property
|
||||
def extra_state_attributes(self):
|
||||
"""Return the state attributes of the sensor."""
|
||||
if self._attributes is None:
|
||||
return None
|
||||
|
||||
high_level = self._attributes[self._printer].get("marker-high-levels")
|
||||
if isinstance(high_level, list):
|
||||
high_level = high_level[self._index]
|
||||
|
||||
low_level = self._attributes[self._printer].get("marker-low-levels")
|
||||
if isinstance(low_level, list):
|
||||
low_level = low_level[self._index]
|
||||
|
||||
marker_types = self._attributes[self._printer]["marker-types"]
|
||||
if isinstance(marker_types, list):
|
||||
marker_types = marker_types[self._index]
|
||||
|
||||
if self._is_cups:
|
||||
printer_name = self._printer
|
||||
else:
|
||||
printer_name = self._attributes[self._printer]["printer-make-and-model"]
|
||||
|
||||
return {
|
||||
ATTR_MARKER_HIGH_LEVEL: high_level,
|
||||
ATTR_MARKER_LOW_LEVEL: low_level,
|
||||
ATTR_MARKER_TYPE: marker_types,
|
||||
ATTR_PRINTER_NAME: printer_name,
|
||||
}
|
||||
|
||||
def update(self) -> None:
|
||||
"""Update the state of the sensor."""
|
||||
# Data fetching is done by CupsSensor/IPPSensor
|
||||
self._attributes = self.data.attributes
|
||||
|
||||
|
||||
class CupsData:
|
||||
"""Get the latest data from CUPS and update the state."""
|
||||
|
||||
def __init__(self, host: str, port: int, ipp_printers: list[str] | None) -> None:
|
||||
"""Initialize the data object."""
|
||||
self._host = host
|
||||
self._port = port
|
||||
self._ipp_printers = ipp_printers
|
||||
self.is_cups = ipp_printers is None
|
||||
self.printers: dict[str, dict[str, Any]] | None = None
|
||||
self.attributes: dict[str, Any] = {}
|
||||
self.available = False
|
||||
|
||||
def update(self) -> None:
|
||||
"""Get the latest data from CUPS."""
|
||||
cups = importlib.import_module("cups")
|
||||
|
||||
try:
|
||||
conn = cups.Connection(host=self._host, port=self._port)
|
||||
if self.is_cups:
|
||||
self.printers = conn.getPrinters()
|
||||
assert self.printers is not None
|
||||
for printer in self.printers:
|
||||
self.attributes[printer] = conn.getPrinterAttributes(name=printer)
|
||||
else:
|
||||
assert self._ipp_printers is not None
|
||||
for ipp_printer in self._ipp_printers:
|
||||
self.attributes[ipp_printer] = conn.getPrinterAttributes(
|
||||
uri=f"ipp://{self._host}:{self._port}/{ipp_printer}"
|
||||
)
|
||||
|
||||
self.available = True
|
||||
except RuntimeError:
|
||||
self.available = False
|
||||
@@ -1,3 +0,0 @@
"""The decora component."""
|
||||
|
||||
DOMAIN = "decora"
|
||||
@@ -1,166 +0,0 @@
"""Support for Decora dimmers."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import copy
|
||||
from functools import wraps
|
||||
import logging
|
||||
import time
|
||||
from typing import TYPE_CHECKING, Any, Concatenate
|
||||
|
||||
from bluepy.btle import BTLEException
|
||||
import decora
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant import util
|
||||
from homeassistant.components.light import (
|
||||
ATTR_BRIGHTNESS,
|
||||
PLATFORM_SCHEMA as LIGHT_PLATFORM_SCHEMA,
|
||||
ColorMode,
|
||||
LightEntity,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_DEVICES, CONF_NAME
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
|
||||
|
||||
from . import DOMAIN
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _name_validator(config):
|
||||
"""Validate the name."""
|
||||
config = copy.deepcopy(config)
|
||||
for address, device_config in config[CONF_DEVICES].items():
|
||||
if CONF_NAME not in device_config:
|
||||
device_config[CONF_NAME] = util.slugify(address)
|
||||
|
||||
return config
|
||||
|
||||
|
||||
DEVICE_SCHEMA = vol.Schema(
|
||||
{vol.Optional(CONF_NAME): cv.string, vol.Required(CONF_API_KEY): cv.string}
|
||||
)
|
||||
|
||||
PLATFORM_SCHEMA = vol.Schema(
|
||||
vol.All(
|
||||
LIGHT_PLATFORM_SCHEMA.extend(
|
||||
{vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA}}
|
||||
),
|
||||
_name_validator,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def retry[_DecoraLightT: DecoraLight, **_P, _R](
|
||||
method: Callable[Concatenate[_DecoraLightT, _P], _R],
|
||||
) -> Callable[Concatenate[_DecoraLightT, _P], _R | None]:
|
||||
"""Retry bluetooth commands."""
|
||||
|
||||
@wraps(method)
|
||||
def wrapper_retry(
|
||||
device: _DecoraLightT, *args: _P.args, **kwargs: _P.kwargs
|
||||
) -> _R | None:
|
||||
"""Try send command and retry on error."""
|
||||
|
||||
initial = time.monotonic()
|
||||
while True:
|
||||
if time.monotonic() - initial >= 10:
|
||||
return None
|
||||
try:
|
||||
return method(device, *args, **kwargs)
|
||||
except (decora.decoraException, AttributeError, BTLEException):
|
||||
_LOGGER.warning(
|
||||
"Decora connect error for device %s. Reconnecting",
|
||||
device.name,
|
||||
)
|
||||
device._switch.connect() # noqa: SLF001
|
||||
|
||||
return wrapper_retry
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up an Decora switch."""
|
||||
create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
|
||||
breaks_in_ha_version="2025.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_system_packages_yaml_integration",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Leviton Decora",
|
||||
},
|
||||
)
|
||||
|
||||
lights = []
|
||||
for address, device_config in config[CONF_DEVICES].items():
|
||||
device = {}
|
||||
device["name"] = device_config[CONF_NAME]
|
||||
device["key"] = device_config[CONF_API_KEY]
|
||||
device["address"] = address
|
||||
light = DecoraLight(device)
|
||||
lights.append(light)
|
||||
|
||||
add_entities(lights)
|
||||
|
||||
|
||||
class DecoraLight(LightEntity):
|
||||
"""Representation of an Decora light."""
|
||||
|
||||
_attr_color_mode = ColorMode.BRIGHTNESS
|
||||
_attr_supported_color_modes = {ColorMode.BRIGHTNESS}
|
||||
|
||||
def __init__(self, device: dict[str, Any]) -> None:
|
||||
"""Initialize the light."""
|
||||
|
||||
self._attr_name = device["name"]
|
||||
self._attr_unique_id = device["address"]
|
||||
self._key = device["key"]
|
||||
self._switch = decora.decora(device["address"], self._key)
|
||||
self._attr_brightness = 0
|
||||
self._attr_is_on = False
|
||||
|
||||
@retry
|
||||
def set_state(self, brightness: int) -> None:
|
||||
"""Set the state of this lamp to the provided brightness."""
|
||||
self._switch.set_brightness(int(brightness / 2.55))
|
||||
self._attr_brightness = brightness
|
||||
|
||||
@retry
|
||||
def turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the specified or all lights on."""
|
||||
brightness = kwargs.get(ATTR_BRIGHTNESS)
|
||||
self._switch.on()
|
||||
self._attr_is_on = True
|
||||
|
||||
if brightness is not None:
|
||||
self.set_state(brightness)
|
||||
|
||||
@retry
|
||||
def turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the specified or all lights off."""
|
||||
self._switch.off()
|
||||
self._attr_is_on = False
|
||||
|
||||
@retry
|
||||
def update(self) -> None:
|
||||
"""Synchronise internal state with the actual light state."""
|
||||
self._attr_brightness = self._switch.get_brightness() * 2.55
|
||||
self._attr_is_on = self._switch.get_on()
|
||||
@@ -1,10 +0,0 @@
{
  "domain": "decora",
  "name": "Leviton Decora",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/decora",
  "iot_class": "local_polling",
  "loggers": ["bluepy", "decora"],
  "quality_scale": "legacy",
  "requirements": ["bluepy==1.3.0", "decora==0.6"]
}
@@ -5,6 +5,7 @@ from __future__ import annotations
 import asyncio
 from collections.abc import Mapping
 from functools import partial
+import logging
 from typing import Any
 
 from devolo_home_control_api.exceptions.gateway import GatewayOfflineError
@@ -22,6 +23,8 @@ from .const import DOMAIN, PLATFORMS
 
 type DevoloHomeControlConfigEntry = ConfigEntry[list[HomeControl]]
 
+_LOGGER = logging.getLogger(__name__)
+
 
 async def async_setup_entry(
     hass: HomeAssistant, entry: DevoloHomeControlConfigEntry
@@ -44,26 +47,29 @@ async def async_setup_entry(
         hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shutdown)
     )
 
-    try:
-        zeroconf_instance = await zeroconf.async_get_instance(hass)
-        entry.runtime_data = []
-        for gateway_id in gateway_ids:
+    zeroconf_instance = await zeroconf.async_get_instance(hass)
+    entry.runtime_data = []
+    offline_gateways = 0
+    for gateway_id in gateway_ids:
+        try:
             entry.runtime_data.append(
                 await hass.async_add_executor_job(
                     partial(
                         HomeControl,
-                        gateway_id=str(gateway_id),
+                        gateway_id=gateway_id,
                         mydevolo_instance=mydevolo,
                         zeroconf_instance=zeroconf_instance,
                     )
                 )
             )
-    except GatewayOfflineError as err:
+        except GatewayOfflineError:
+            offline_gateways += 1
+            _LOGGER.info("Central unit %s cannot be reached locally", gateway_id)
+    if len(gateway_ids) == offline_gateways:
         raise ConfigEntryNotReady(
             translation_domain=DOMAIN,
             translation_key="connection_failed",
-            translation_placeholders={"gateway_id": gateway_id},
-        ) from err
+        )
 
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
 
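This hunk changes the failure mode of setup: previously one unreachable central unit aborted the whole config entry, whereas now each gateway is tried and ConfigEntryNotReady is only raised when every gateway is offline. A minimal standalone sketch of the pattern (connect and GatewayOffline are stand-ins for the real API, not part of the diff):

class GatewayOffline(Exception):
    """Stand-in for GatewayOfflineError."""


def connect_all(gateway_ids, connect):
    """Connect to each gateway; fail only if all of them are offline."""
    connected = []
    offline = 0
    for gateway_id in gateway_ids:
        try:
            connected.append(connect(gateway_id))
        except GatewayOffline:
            offline += 1  # keep going; other gateways may still be reachable
    if offline == len(gateway_ids):
        raise RuntimeError("connection_failed")
    return connected
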
@@ -7,7 +7,7 @@
"documentation": "https://www.home-assistant.io/integrations/devolo_home_control",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["devolo_home_control_api"],
|
||||
"loggers": ["HomeControl", "Mydevolo", "MprmRest", "MprmWebsocket", "Mprm"],
|
||||
"requirements": ["devolo-home-control-api==0.19.0"],
|
||||
"zeroconf": ["_dvl-deviceapi._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -58,7 +58,7 @@
   },
   "exceptions": {
     "connection_failed": {
-      "message": "Failed to connect to devolo Home Control central unit {gateway_id}."
+      "message": "Failed to connect to any devolo Home Control central unit."
     },
     "invalid_auth": {
       "message": "Authentication failed. Please re-authenticate with your mydevolo account."
@@ -1,3 +0,0 @@
"""The dlib_face_detect component."""
|
||||
|
||||
DOMAIN = "dlib_face_detect"
|
||||
@@ -1,82 +0,0 @@
"""Component that will help set the Dlib face detect processing."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
|
||||
import face_recognition
|
||||
|
||||
from homeassistant.components.image_processing import (
|
||||
PLATFORM_SCHEMA as IMAGE_PROCESSING_PLATFORM_SCHEMA,
|
||||
ImageProcessingFaceEntity,
|
||||
)
|
||||
from homeassistant.const import ATTR_LOCATION, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE
|
||||
from homeassistant.core import (
|
||||
DOMAIN as HOMEASSISTANT_DOMAIN,
|
||||
HomeAssistant,
|
||||
split_entity_id,
|
||||
)
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import DOMAIN
|
||||
|
||||
PLATFORM_SCHEMA = IMAGE_PROCESSING_PLATFORM_SCHEMA
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Dlib Face detection platform."""
|
||||
create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
|
||||
breaks_in_ha_version="2025.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_system_packages_yaml_integration",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Dlib Face Detect",
|
||||
},
|
||||
)
|
||||
source: list[dict[str, str]] = config[CONF_SOURCE]
|
||||
add_entities(
|
||||
DlibFaceDetectEntity(camera[CONF_ENTITY_ID], camera.get(CONF_NAME))
|
||||
for camera in source
|
||||
)
|
||||
|
||||
|
||||
class DlibFaceDetectEntity(ImageProcessingFaceEntity):
|
||||
"""Dlib Face API entity for identify."""
|
||||
|
||||
def __init__(self, camera_entity: str, name: str | None) -> None:
|
||||
"""Initialize Dlib face entity."""
|
||||
super().__init__()
|
||||
|
||||
self._attr_camera_entity = camera_entity
|
||||
|
||||
if name:
|
||||
self._attr_name = name
|
||||
else:
|
||||
self._attr_name = f"Dlib Face {split_entity_id(camera_entity)[1]}"
|
||||
|
||||
def process_image(self, image: bytes) -> None:
|
||||
"""Process image."""
|
||||
|
||||
fak_file = io.BytesIO(image)
|
||||
fak_file.name = "snapshot.jpg"
|
||||
fak_file.seek(0)
|
||||
|
||||
image = face_recognition.load_image_file(fak_file)
|
||||
face_locations = face_recognition.face_locations(image)
|
||||
|
||||
face_locations = [{ATTR_LOCATION: location} for location in face_locations]
|
||||
|
||||
self.process_faces(face_locations, len(face_locations))
|
||||
@@ -1,10 +0,0 @@
{
  "domain": "dlib_face_detect",
  "name": "Dlib Face Detect",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/dlib_face_detect",
  "iot_class": "local_push",
  "loggers": ["face_recognition"],
  "quality_scale": "legacy",
  "requirements": ["face-recognition==1.2.3"]
}
@@ -1,4 +0,0 @@
"""The dlib_face_identify component."""
|
||||
|
||||
CONF_FACES = "faces"
|
||||
DOMAIN = "dlib_face_identify"
|
||||
@@ -1,127 +0,0 @@
"""Component that will help set the Dlib face detect processing."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
import logging
|
||||
|
||||
import face_recognition
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.image_processing import (
|
||||
CONF_CONFIDENCE,
|
||||
PLATFORM_SCHEMA as IMAGE_PROCESSING_PLATFORM_SCHEMA,
|
||||
FaceInformation,
|
||||
ImageProcessingFaceEntity,
|
||||
)
|
||||
from homeassistant.const import ATTR_NAME, CONF_ENTITY_ID, CONF_NAME, CONF_SOURCE
|
||||
from homeassistant.core import (
|
||||
DOMAIN as HOMEASSISTANT_DOMAIN,
|
||||
HomeAssistant,
|
||||
split_entity_id,
|
||||
)
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, create_issue
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from . import CONF_FACES, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
PLATFORM_SCHEMA = IMAGE_PROCESSING_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_FACES): {cv.string: cv.isfile},
|
||||
vol.Optional(CONF_CONFIDENCE, default=0.6): vol.Coerce(float),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the Dlib Face detection platform."""
|
||||
create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_system_packages_yaml_integration_{DOMAIN}",
|
||||
breaks_in_ha_version="2025.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_system_packages_yaml_integration",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Dlib Face Identify",
|
||||
},
|
||||
)
|
||||
|
||||
confidence: float = config[CONF_CONFIDENCE]
|
||||
faces: dict[str, str] = config[CONF_FACES]
|
||||
source: list[dict[str, str]] = config[CONF_SOURCE]
|
||||
add_entities(
|
||||
DlibFaceIdentifyEntity(
|
||||
camera[CONF_ENTITY_ID],
|
||||
faces,
|
||||
camera.get(CONF_NAME),
|
||||
confidence,
|
||||
)
|
||||
for camera in source
|
||||
)
|
||||
|
||||
|
||||
class DlibFaceIdentifyEntity(ImageProcessingFaceEntity):
|
||||
"""Dlib Face API entity for identify."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
camera_entity: str,
|
||||
faces: dict[str, str],
|
||||
name: str | None,
|
||||
tolerance: float,
|
||||
) -> None:
|
||||
"""Initialize Dlib face identify entry."""
|
||||
|
||||
super().__init__()
|
||||
|
||||
self._attr_camera_entity = camera_entity
|
||||
|
||||
if name:
|
||||
self._attr_name = name
|
||||
else:
|
||||
self._attr_name = f"Dlib Face {split_entity_id(camera_entity)[1]}"
|
||||
|
||||
self._faces = {}
|
||||
for face_name, face_file in faces.items():
|
||||
try:
|
||||
image = face_recognition.load_image_file(face_file)
|
||||
self._faces[face_name] = face_recognition.face_encodings(image)[0]
|
||||
except IndexError as err:
|
||||
_LOGGER.error("Failed to parse %s. Error: %s", face_file, err)
|
||||
|
||||
self._tolerance = tolerance
|
||||
|
||||
def process_image(self, image: bytes) -> None:
|
||||
"""Process image."""
|
||||
|
||||
fak_file = io.BytesIO(image)
|
||||
fak_file.name = "snapshot.jpg"
|
||||
fak_file.seek(0)
|
||||
|
||||
image = face_recognition.load_image_file(fak_file)
|
||||
unknowns = face_recognition.face_encodings(image)
|
||||
|
||||
found: list[FaceInformation] = []
|
||||
for unknown_face in unknowns:
|
||||
for name, face in self._faces.items():
|
||||
result = face_recognition.compare_faces(
|
||||
[face], unknown_face, tolerance=self._tolerance
|
||||
)
|
||||
if result[0]:
|
||||
found.append({ATTR_NAME: name})
|
||||
|
||||
self.process_faces(found, len(unknowns))
|
||||
@@ -1,10 +0,0 @@
{
  "domain": "dlib_face_identify",
  "name": "Dlib Face Identify",
  "codeowners": [],
  "documentation": "https://www.home-assistant.io/integrations/dlib_face_identify",
  "iot_class": "local_push",
  "loggers": ["face_recognition"],
  "quality_scale": "legacy",
  "requirements": ["face-recognition==1.2.3"]
}
@@ -81,6 +81,9 @@
"active_map": {
|
||||
"default": "mdi:floor-plan"
|
||||
},
|
||||
"auto_empty": {
|
||||
"default": "mdi:delete-empty"
|
||||
},
|
||||
"water_amount": {
|
||||
"default": "mdi:water"
|
||||
},
|
||||
@@ -160,6 +163,9 @@
"advanced_mode": {
|
||||
"default": "mdi:tune"
|
||||
},
|
||||
"border_spin": {
|
||||
"default": "mdi:rotate-right"
|
||||
},
|
||||
"border_switch": {
|
||||
"default": "mdi:land-fields"
|
||||
},
|
||||
|
||||
@@ -5,8 +5,9 @@ from dataclasses import dataclass
 from typing import TYPE_CHECKING, Any
 
 from deebot_client.capabilities import CapabilityMap, CapabilitySet, CapabilitySetTypes
+from deebot_client.command import CommandWithMessageHandling
 from deebot_client.device import Device
-from deebot_client.events import WorkModeEvent
+from deebot_client.events import WorkModeEvent, auto_empty
 from deebot_client.events.base import Event
 from deebot_client.events.map import CachedMapInfoEvent, MajorMapEvent
 from deebot_client.events.water_info import WaterAmountEvent
@@ -34,6 +35,9 @@ class EcovacsSelectEntityDescription[EventT: Event](
 
     current_option_fn: Callable[[EventT], str | None]
     options_fn: Callable[[CapabilitySetTypes], list[str]]
+    set_option_fn: Callable[[CapabilitySetTypes, str], CommandWithMessageHandling] = (
+        lambda cap, option: cap.set(option)
+    )
 
 
 ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
@@ -58,6 +62,14 @@ ENTITY_DESCRIPTIONS: tuple[EcovacsSelectEntityDescription, ...] = (
         entity_registry_enabled_default=False,
         entity_category=EntityCategory.CONFIG,
     ),
+    EcovacsSelectEntityDescription[auto_empty.AutoEmptyEvent](
+        capability_fn=lambda caps: caps.station.auto_empty if caps.station else None,
+        current_option_fn=lambda e: get_name_key(e.frequency) if e.frequency else None,
+        options_fn=lambda cap: [get_name_key(freq) for freq in cap.types],
+        set_option_fn=lambda cap, option: cap.set(None, option),
+        key="auto_empty",
+        translation_key="auto_empty",
+    ),
 )
 
 
@@ -106,14 +118,17 @@ class EcovacsSelectEntity[EventT: Event](
         await super().async_added_to_hass()
 
         async def on_event(event: EventT) -> None:
-            self._attr_current_option = self.entity_description.current_option_fn(event)
-            self.async_write_ha_state()
+            if (option := self.entity_description.current_option_fn(event)) is not None:
+                self._attr_current_option = option
+                self.async_write_ha_state()
 
         self._subscribe(self._capability.event, on_event)
 
     async def async_select_option(self, option: str) -> None:
         """Change the selected option."""
-        await self._device.execute_command(self._capability.set(option))
+        await self._device.execute_command(
+            self.entity_description.set_option_fn(self._capability, option)
+        )
 
 
 class EcovacsActiveMapSelectEntity(
@@ -129,6 +129,16 @@
"active_map": {
|
||||
"name": "Active map"
|
||||
},
|
||||
"auto_empty": {
|
||||
"name": "Auto-empty frequency",
|
||||
"state": {
|
||||
"auto": "Auto",
|
||||
"min_10": "10 minutes",
|
||||
"min_15": "15 minutes",
|
||||
"min_25": "25 minutes",
|
||||
"smart": "Smart"
|
||||
}
|
||||
},
|
||||
"water_amount": {
|
||||
"name": "[%key:component::ecovacs::entity::number::water_amount::name%]",
|
||||
"state": {
|
||||
@@ -231,6 +241,9 @@
"advanced_mode": {
|
||||
"name": "Advanced mode"
|
||||
},
|
||||
"border_spin": {
|
||||
"name": "Border spin"
|
||||
},
|
||||
"border_switch": {
|
||||
"name": "Border switch"
|
||||
},
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.