mirror of https://github.com/home-assistant/core.git
synced 2025-12-25 01:08:05 +00:00

Compare commits: echo ... manual_tri

466 Commits
| SHA1 |
| --- |
| 635669278c |
| 8536f2b4cb |
| 387bf83ba8 |
| 18b0f54a3e |
| f76e295204 |
| e63b17cd58 |
| 05e23f0fc7 |
| fca4ef3b1e |
| 1226354823 |
| 40099547ef |
| de4540c68e |
| d006d33dc0 |
| 4c8a58f7cc |
| 8d6178ffa6 |
| 0a3562aca3 |
| c9abe76023 |
| 0c803520a3 |
| 5ac3fe6ee1 |
| b7bedd4b8f |
| 0694f9e164 |
| 5b1f3d3e7f |
| d922c723d4 |
| 3eadfcc01d |
| 29f680f912 |
| ee2b53ed0f |
| b0b5567316 |
| e6c946b3f4 |
| b2c7c5b1aa |
| 220509fd6c |
| 7293ae5d51 |
| 4a7fd89abd |
| 077ff63b38 |
| 55fd5fa869 |
| e3eb6051de |
| 3e9304253d |
| cc8ed2c228 |
| 89b655c192 |
| 56ddfa9ff8 |
| a2a11ad02e |
| f7927f9da1 |
| 13918f07d8 |
| 35825be12b |
| 1786bb9903 |
| 3588784f1e |
| 2cce1b024e |
| c168695323 |
| 913a4ee9ba |
| dd21d48ae4 |
| b3f14d72c0 |
| 51beb1c0a8 |
| 0c5766184b |
| b1a2b89691 |
| 4813da33d6 |
| d4099ab917 |
| ee206938d8 |
| 9fe08f292d |
| 9a331de878 |
| 2de941bc11 |
| c5e0418f75 |
| 679b57e450 |
| 91eba0855e |
| 43f48b8562 |
| df95902004 |
| 3edc7913de |
| 1852052dff |
| fe5cd5c55c |
| 042e4d20c5 |
| dfe1921737 |
| 2c620f1f60 |
| 66a17bd072 |
| 18217a594f |
| a6e2dc485b |
| 8e7960fa0e |
| 1dc6a94093 |
| 615d79b429 |
| ebd6daa31d |
| d6750624ce |
| 577b22374a |
| ee1fe2cae4 |
| db05aa17d3 |
| b1ee019e3a |
| b43a7ff1fe |
| 2d6068b842 |
| ac4c379a0e |
| 00b7c4f9ef |
| 3f48826370 |
| ed06831e9d |
| c21234672d |
| 32950df0b7 |
| 0f615bbe4f |
| 5a6ffe1901 |
| 6ce48eab45 |
| 437e545116 |
| 1a80934593 |
| 455363871f |
| 39bc37d225 |
| 90fc6ffdbf |
| 086c91485f |
| bf27ccce17 |
| 70bb56e0fc |
| 49c27ae7bc |
| e9bb4625d8 |
| 2e077cbf12 |
| 271d225e51 |
| fca19a3ec1 |
| 0681652aec |
| 5fa5d08b18 |
| 0f0866cd52 |
| 1b27365c58 |
| 3cd7f50216 |
| 40d2d6df2c |
| c2a7736417 |
| ac15d9b3d4 |
| 228a4eb391 |
| 030a1460de |
| d157919da2 |
| b79c6e772a |
| d6f9040baf |
| 0310418efc |
| 62dc0ac485 |
| 9a62b0f245 |
| a296c5e9ad |
| 12cb349160 |
| 5cf56ec113 |
| 1be9836663 |
| 9d10e0e054 |
| 05df572951 |
| 6953c20a65 |
| 4e8186491c |
| 6fa93edf27 |
| ef13b35c35 |
| 0afdd9556f |
| e11ead410b |
| ef7058f703 |
| 938855bea3 |
| 4c00c56afd |
| 8cc7e7b76f |
| df006aeade |
| ffac522554 |
| 9502dbee56 |
| a339fbaa82 |
| b02eaed6b0 |
| df594748cf |
| 744a7a0e82 |
| f677b910a6 |
| 0da6b28808 |
| f111a2c34a |
| 59eb323f8d |
| 7ae13a4d72 |
| 735b843f5e |
| 5b1783e859 |
| 7b14b6af0e |
| cc18ec2de8 |
| df59adf5d1 |
| 8c98cede60 |
| b1a70c86c3 |
| 63daed0ed6 |
| 2150a668b0 |
| b505722f38 |
| 036eef2b6b |
| f3fb7cd8e8 |
| 42f55bf271 |
| 6d7dad41d9 |
| 9dbce6d904 |
| 7f0db3181d |
| 2e972422c2 |
| 3a21c36173 |
| 25ee2e58a5 |
| 561b3ae21b |
| 5be7f49146 |
| 2694828451 |
| 3eea932b24 |
| 468208502f |
| 92268f894a |
| 5e5fd6a2f2 |
| cadee73da8 |
| 51099ae7d6 |
| b777c29bab |
| fc1190dafd |
| 775a81829b |
| 998757f09e |
| b964bc58be |
| bd80a78848 |
| 37c8764426 |
| 9262dec444 |
| 3c3c4d2641 |
| c1898ece80 |
| fdf69fcd7d |
| e403bee95b |
| 9be8fd4eac |
| e09b40c2bd |
| 2826198d5d |
| 5324f3e542 |
| 7e97ef588b |
| bb120020a8 |
| bb9aba2a7d |
| b676c2f61b |
| 0c092f80c7 |
| 2bf592d8aa |
| e591157e37 |
| ee01aa73b8 |
| 0f827fbf22 |
| 4dca4a64b5 |
| b82886a3e1 |
| fe396cdf4b |
| 5895245a31 |
| 861ba0ee5e |
| d15f9edc57 |
| cab6ec0363 |
| eb26a2124b |
| 4530fe4bf7 |
| b1865de58f |
| 3ff04d6d04 |
| bd306abace |
| 412ceca6f7 |
| 8644fb1887 |
| 622be70fee |
| 7bc0c1b912 |
| 3230e741e9 |
| 81db3dea41 |
| fe348e17a3 |
| 03f6508bd8 |
| fd47d6578e |
| df6a5d7459 |
| b8a0cdea12 |
| 570e11ba5b |
| 19704cff04 |
| 51c09c2aa4 |
| ef46552146 |
| 75533463f7 |
| 2cd496fdaf |
| cd4c79450b |
| a1d1f6ec97 |
| a910fb879c |
| 4e904bf5a3 |
| 38cc26485a |
| 2bba185e4c |
| 743cc42829 |
| f3021b40ab |
| 9ec9110e1e |
| 433c2cb43e |
| fcffe5151d |
| ca1677cc46 |
| 27f7085b61 |
| f607b95c00 |
| 72502c1a15 |
| 47e78e9008 |
| 1fb51ef189 |
| f96e31fad8 |
| e99bf21a36 |
| 3059d06960 |
| 2b55f3af36 |
| 776501f5e6 |
| 1f93d2cefb |
| 1633700a58 |
| 923ec71bf6 |
| 7566046995 |
| b9dbf07a5e |
| b8b153b87f |
| d4dd8fd902 |
| a3bc55f49b |
| 7ba94a680d |
| 664e09790c |
| d45fce86a9 |
| 507c0739df |
| d7301c62e2 |
| befed910da |
| 2509353221 |
| 694a77fe3c |
| bc7f5f3981 |
| cea5cda881 |
| 9e063fd77c |
| 01fb6841da |
| 48d3dd88a1 |
| 051cc41d4f |
| 661b55d6eb |
| d197acc069 |
| bf190a8a73 |
| c386abd49d |
| 6342d8334b |
| 24bb13e0d1 |
| 212c42ca77 |
| 54843bb422 |
| c115a7f455 |
| 597c0ab985 |
| b86bb75e5e |
| b662d32e44 |
| 72f690d681 |
| 33c9f3cc7d |
| a1076300c8 |
| dc92e912c2 |
| 2451e5578a |
| 1c83dab0a1 |
| b42973040c |
| 6507955a14 |
| 79dbc70470 |
| 2bab7436d3 |
| 60479369b6 |
| ec3f5561dc |
| 2e5f56b70d |
| 461039f06a |
| 351e594fe4 |
| 377da5f954 |
| 51a881f3b5 |
| 5025e31129 |
| f98720e525 |
| 37240e811b |
| 0b7a023d2e |
| beec67a247 |
| 571349e3a2 |
| d9eb248e91 |
| fc8affd243 |
| 4d6fd1b10f |
| 257242e6e3 |
| 7f494c235c |
| 8c42db7501 |
| 183bbcd1e1 |
| 8c4b8028cf |
| ea1045d826 |
| db5bf41790 |
| 580c6f2684 |
| d62c18c225 |
| 8f9f9bc8e7 |
| 6ad6e82a23 |
| 3d507c7b44 |
| 4f5c7353f8 |
| 0b961d98f5 |
| 1cd82ab8ee |
| cc792403ab |
| 3d2ab3b59e |
| ba1650bd05 |
| df5f6fc1e6 |
| 0dbdb42947 |
| 325022ec77 |
| 3ea1d2823e |
| 83d9c000d3 |
| 266612e4d9 |
| dc7cba60bd |
| d752a3a24c |
| 8c3ee80203 |
| 94555f533b |
| 6da33a8883 |
| d42e31b5e7 |
| 441917706b |
| 12e530dc75 |
| 59651c6f10 |
| ac21d2855c |
| 6070feea73 |
| 167881e434 |
| 35bcf82627 |
| 66bb501621 |
| 179ba8309d |
| 2b7543aca2 |
| 1e49e04491 |
| e60b6482ab |
| 7b82781f4c |
| b40daf0152 |
| 417ac56bd6 |
| c9a0814142 |
| 2d8a619b54 |
| 759cc3303a |
| 5328429b08 |
| 21b98a76cc |
| 95f632a13a |
| 33d4d1f8e5 |
| 72878c18d0 |
| ccd220ad0f |
| f191f6ae22 |
| 28a18e538d |
| c2f6255d16 |
| e5fd08ae76 |
| 4b5633d9d8 |
| a9c6a06704 |
| 0faa8efd5a |
| 5a257b090e |
| 41fb6a537f |
| b166c32eb8 |
| 288acfb511 |
| 2cb9682303 |
| 7e52170789 |
| 979b3d4269 |
| 9772014bce |
| f8763c49ef |
| b4ef00659c |
| df49c53bb6 |
| 8dfe483b38 |
| b45d7cbbc3 |
| 239ba9b1cc |
| 2d5a75d4f2 |
| e1ad3f05e6 |
| b9280edbfa |
| 010993fc5f |
| 713931661e |
| af06521f66 |
| c32f57f85a |
| 171061a778 |
| 476ea35bdb |
| 00e6866664 |
| 201bf95ab8 |
| ff22bbd0e4 |
| fd8d4e937c |
| 7903348d79 |
| 090dbba06e |
| af77e69eb0 |
| 23e7638687 |
| 36b722960a |
| 3dd241a398 |
| b5a9c3d1f6 |
| eca714a45a |
| 8049699efb |
| 7c6afd50dc |
| 42d8889778 |
| a4c0304e1f |
| c63e688ba8 |
| 16298b4195 |
| da23eb22db |
| 4bd1d0199b |
| efe7050030 |
| 79ff85f517 |
| 73ad4caf94 |
| e3d649d349 |
| 657e3488ba |
| 7508c14a53 |
| ac84970da8 |
| 30073f3493 |
| 3abd7b8ba3 |
| 62bc6e4bf6 |
| 5faa189fef |
| e09ae1c83d |
| 7b20299de7 |
| 81e501aba1 |
| 568ac22ce8 |
| c71ab054f1 |
| bea201f9f6 |
| dda90bc04c |
| a033e4c88d |
| 42b6f83e7c |
| cb937bc115 |
| bec569caf9 |
| 3390fb32a8 |
| 3ebb58f780 |
| 30b131d3b9 |
| cd40232beb |
| f27fe365c5 |
| 1c769418fb |
| db7c2dab52 |
| 627377872b |
| 8504162539 |
| 67c6a1d436 |
| 1c5eb92c9c |
| 3337dd4ed7 |
| f1d21685e6 |
| 73f27549e4 |
| 1882b914dc |
| 06f99dc9ba |
| 2e2c718d94 |
| b8f56a6ed6 |
| db37dbec03 |
| 579f44468e |
| d452e957c9 |
| 5f9bcd583b |
| c0c508c7a2 |
| 13f5adfa84 |
| a07a3a61bf |
| 848162debd |
| 07cd669bc1 |
.github/workflows/builder.yml (36 changed lines)

@@ -69,7 +69,7 @@ jobs:
         run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

       - name: Upload translations
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: translations
           path: translations.tar.gz
@@ -94,7 +94,7 @@ jobs:

       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v8
+        uses: dawidd6/action-download-artifact@v9
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:

       - name: Download nightly wheels of intents
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v8
+        uses: dawidd6/action-download-artifact@v9
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/intents-package
@@ -175,7 +175,7 @@ jobs:
           sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

       - name: Download translations
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: translations

@@ -197,7 +197,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2024.08.2
+        uses: home-assistant/builder@2025.02.0
         with:
           args: |
             $BUILD_ARGS \
@@ -263,7 +263,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2024.08.2
+        uses: home-assistant/builder@2025.02.0
         with:
           args: |
             $BUILD_ARGS \
@@ -448,6 +448,9 @@ jobs:
     environment: ${{ needs.init.outputs.channel }}
     needs: ["init", "build_base"]
     runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      id-token: write
     if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
     steps:
       - name: Checkout the repository
@@ -459,7 +462,7 @@ jobs:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Download translations
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: translations

@@ -473,16 +476,13 @@ jobs:
         run: |
           # Remove dist, build, and homeassistant.egg-info
           # when build locally for testing!
-          pip install twine build
+          pip install build
           python -m build

-      - name: Upload package
-        shell: bash
-        run: |
-          export TWINE_USERNAME="__token__"
-          export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"
-
-          twine upload dist/* --skip-existing
+      - name: Upload package to PyPI
+        uses: pypa/gh-action-pypi-publish@v1.12.4
+        with:
+          skip-existing: true

   hassfest-image:
     name: Build and test hassfest image
@@ -509,7 +509,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build Docker image
-        uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
       - name: Push Docker image
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
         id: push
-        uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
+        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:

       - name: Generate artifact attestation
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
+        uses: actions/attest-build-provenance@bd77c077858b8d561b7a36cbe48ef4cc642ca39d # v2.2.2
         with:
           subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
           subject-digest: ${{ steps.push.outputs.digest }}
.github/workflows/ci.yaml (78 changed lines)

@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 11
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 9
-  HA_SHORT_VERSION: "2025.3"
+  HA_SHORT_VERSION: "2025.4"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
@@ -240,7 +240,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.2
         with:
           path: venv
           key: >-
@@ -256,7 +256,7 @@ jobs:
           uv pip install "$(cat requirements_test.txt | grep pre-commit)"
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.2
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           lookup-only: true
@@ -286,7 +286,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -295,7 +295,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -326,7 +326,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -335,7 +335,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -366,7 +366,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -375,7 +375,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -482,7 +482,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.2
         with:
           path: venv
           key: >-
@@ -490,7 +490,7 @@ jobs:
             needs.info.outputs.python_cache_key }}
       - name: Restore uv wheel cache
         if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.2
         with:
           path: ${{ env.UV_CACHE_DIR }}
           key: >-
@@ -537,7 +537,7 @@ jobs:
           python --version
           uv pip freeze >> pip_freeze.txt
       - name: Upload pip_freeze artifact
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pip-freeze-${{ matrix.python-version }}
           path: pip_freeze.txt
@@ -578,7 +578,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -611,7 +611,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -649,7 +649,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -661,7 +661,7 @@ jobs:
           . venv/bin/activate
           python -m script.licenses extract --output-file=licenses-${{ matrix.python-version }}.json
       - name: Upload licenses
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: licenses-${{ github.run_number }}-${{ matrix.python-version }}
           path: licenses-${{ matrix.python-version }}.json
@@ -692,7 +692,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -739,7 +739,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -791,7 +791,7 @@ jobs:
           env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -799,7 +799,7 @@ jobs:
           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@v4.2.1
+        uses: actions/cache@v4.2.2
         with:
           path: .mypy_cache
           key: >-
@@ -865,7 +865,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -877,7 +877,7 @@ jobs:
           . venv/bin/activate
           python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests
       - name: Upload pytest_buckets
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest_buckets
           path: pytest_buckets.txt
@@ -929,7 +929,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -942,7 +942,7 @@ jobs:
         run: |
           echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: pytest_buckets
       - name: Compile English translations
@@ -980,14 +980,14 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-full.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
@@ -1051,7 +1051,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1108,7 +1108,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1116,7 +1116,7 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.mariadb }}
@@ -1181,7 +1181,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1239,7 +1239,7 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1247,7 +1247,7 @@ jobs:
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{
             steps.pytest-partial.outputs.postgresql }}
@@ -1271,12 +1271,12 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.3.1
+        uses: codecov/codecov-action@v5.4.0
         with:
           fail_ci_if_error: true
           flags: full-suite
@@ -1328,7 +1328,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.1
+        uses: actions/cache/restore@v4.2.2
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1382,14 +1382,14 @@ jobs:
           2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt
       - name: Upload pytest output
         if: success() || failure() && steps.pytest-partial.conclusion == 'failure'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }}
           path: pytest-*.txt
           overwrite: true
       - name: Upload coverage artifact
         if: needs.info.outputs.skip_coverage != 'true'
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: coverage-${{ matrix.python-version }}-${{ matrix.group }}
           path: coverage.xml
@@ -1410,12 +1410,12 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.3.1
+        uses: codecov/codecov-action@v5.4.0
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/codeql.yml (4 changed lines)

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.28.9
+        uses: github/codeql-action/init@v3.28.10
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.28.9
+        uses: github/codeql-action/analyze@v3.28.10
         with:
           category: "/language:python"
.github/workflows/wheels.yml (66 changed lines)

@@ -91,7 +91,7 @@ jobs:
           ) > build_constraints.txt

       - name: Upload env_file
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: env_file
           path: ./.env_file
@@ -99,14 +99,14 @@ jobs:
           overwrite: true

       - name: Upload build_constraints
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: build_constraints
           path: ./build_constraints.txt
           overwrite: true

       - name: Upload requirements_diff
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: requirements_diff
           path: ./requirements_diff.txt
@@ -118,7 +118,7 @@ jobs:
           python -m script.gen_requirements_all ci

       - name: Upload requirements_all_wheels
-        uses: actions/upload-artifact@v4.6.0
+        uses: actions/upload-artifact@v4.6.1
         with:
           name: requirements_all_wheels
           path: ./requirements_all_wheels_*.txt
@@ -138,17 +138,17 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: requirements_diff

@@ -159,7 +159,7 @@ jobs:
           sed -i "/uv/d" requirements_diff.txt

       - name: Build wheels
-        uses: home-assistant/wheels@2024.11.0
+        uses: home-assistant/wheels@2025.02.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -187,22 +187,22 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v4.1.8
+        uses: actions/download-artifact@v4.1.9
         with:
           name: requirements_all_wheels

@@ -218,16 +218,8 @@ jobs:
           sed -i "/uv/d" requirements.txt
           sed -i "/uv/d" requirements_diff.txt

-      - name: Split requirements all
-        run: |
-          # We split requirements all into multiple files.
-          # This is to prevent the build from running out of memory when
-          # resolving packages on 32-bits systems (like armhf, armv7).
-
-          split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
-
-      - name: Build wheels (part 1)
-        uses: home-assistant/wheels@2024.11.0
+      - name: Build wheels
+        uses: home-assistant/wheels@2025.02.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -238,32 +230,4 @@ jobs:
           skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtaa"
-
-      - name: Build wheels (part 2)
-        uses: home-assistant/wheels@2024.11.0
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtab"
-
-      - name: Build wheels (part 3)
-        uses: home-assistant/wheels@2024.11.0
-        with:
-          abi: ${{ matrix.abi }}
-          tag: musllinux_1_2
-          arch: ${{ matrix.arch }}
-          wheels-key: ${{ secrets.WHEELS_KEY }}
-          env-file: true
-          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
-          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
-          constraints: "homeassistant/package_constraints.txt"
-          requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txtac"
+          requirements: "requirements_all.txt"
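The deleted "Split requirements all" step is the substantive part of this diff: the old shell one-liner carved requirements_all.txt into three chunks (requirements_all.txtaa/ab/ac, the names split(1) generates) so 32-bit builders (armhf, armv7) did not run out of memory resolving packages, and three build steps consumed one chunk each. wheels@2025.02.0 handles the full file, so the three steps collapse into one. For reference, a rough standalone Python equivalent of the removed split; this is a sketch mirroring the old shell arithmetic, not anything that exists in the repo:

```python
import math
from pathlib import Path


def split_requirements(path: str = "requirements_all.txt", parts: int = 3) -> list[Path]:
    """Approximate the removed `split -l` step: cut the requirements file
    into near-equal chunks named the way split(1) would (...txtaa, ...txtab)."""
    lines = Path(path).read_text().splitlines(keepends=True)
    per_part = math.ceil(len(lines) / parts)  # ceil keeps the chunk count <= parts
    outputs: list[Path] = []
    for index in range(parts):
        chunk = lines[index * per_part : (index + 1) * per_part]
        if not chunk:
            break
        dest = Path(f"{path}a{chr(ord('a') + index)}")  # requirements_all.txtaa, ...
        dest.write_text("".join(chunk))
        outputs.append(dest)
    return outputs
```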
.pre-commit-config.yaml

@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.7
+    rev: v0.9.8
     hooks:
       - id: ruff
         args:
.strict-typing

@@ -103,6 +103,7 @@ homeassistant.components.auth.*
 homeassistant.components.automation.*
 homeassistant.components.awair.*
 homeassistant.components.axis.*
+homeassistant.components.azure_storage.*
 homeassistant.components.backup.*
 homeassistant.components.baf.*
 homeassistant.components.bang_olufsen.*
@@ -395,6 +396,7 @@ homeassistant.components.pure_energie.*
 homeassistant.components.purpleair.*
 homeassistant.components.pushbullet.*
 homeassistant.components.pvoutput.*
 homeassistant.components.pyload.*
 homeassistant.components.python_script.*
+homeassistant.components.qbus.*
 homeassistant.components.qnap_qsw.*
@@ -407,6 +409,7 @@ homeassistant.components.raspberry_pi.*
 homeassistant.components.rdw.*
 homeassistant.components.recollect_waste.*
 homeassistant.components.recorder.*
+homeassistant.components.remember_the_milk.*
 homeassistant.components.remote.*
 homeassistant.components.renault.*
 homeassistant.components.reolink.*
@@ -526,6 +529,7 @@ homeassistant.components.vallox.*
 homeassistant.components.valve.*
 homeassistant.components.velbus.*
 homeassistant.components.vlc_telnet.*
+homeassistant.components.vodafone_station.*
 homeassistant.components.wake_on_lan.*
 homeassistant.components.wake_word.*
 homeassistant.components.wallbox.*
.vscode/launch.json (1 changed line)

@@ -38,7 +38,6 @@
       "module": "pytest",
       "justMyCode": false,
       "args": [
-        "--timeout=10",
         "--picked"
       ],
     },
CODEOWNERS (20 changed lines)

@@ -180,6 +180,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/azure_event_hub/ @eavanvalkenburg
 /tests/components/azure_event_hub/ @eavanvalkenburg
 /homeassistant/components/azure_service_bus/ @hfurubotten
+/homeassistant/components/azure_storage/ @zweckj
+/tests/components/azure_storage/ @zweckj
 /homeassistant/components/backup/ @home-assistant/core
 /tests/components/backup/ @home-assistant/core
 /homeassistant/components/baf/ @bdraco @jfroy
@@ -967,8 +969,8 @@ build.json @home-assistant/supervisor
 /tests/components/motionblinds_ble/ @LennP @jerrybboy
 /homeassistant/components/motioneye/ @dermotduffy
 /tests/components/motioneye/ @dermotduffy
-/homeassistant/components/motionmount/ @RJPoelstra
-/tests/components/motionmount/ @RJPoelstra
+/homeassistant/components/motionmount/ @laiho-vogels
+/tests/components/motionmount/ @laiho-vogels
 /homeassistant/components/mqtt/ @emontnemery @jbouwh @bdraco
 /tests/components/mqtt/ @emontnemery @jbouwh @bdraco
 /homeassistant/components/msteams/ @peroyvind
@@ -1051,8 +1053,8 @@ build.json @home-assistant/supervisor
 /tests/components/numato/ @clssn
 /homeassistant/components/number/ @home-assistant/core @Shulyaka
 /tests/components/number/ @home-assistant/core @Shulyaka
-/homeassistant/components/nut/ @bdraco @ollo69 @pestevez
-/tests/components/nut/ @bdraco @ollo69 @pestevez
+/homeassistant/components/nut/ @bdraco @ollo69 @pestevez @tdfountain
+/tests/components/nut/ @bdraco @ollo69 @pestevez @tdfountain
 /homeassistant/components/nws/ @MatthewFlamm @kamiyo
 /tests/components/nws/ @MatthewFlamm @kamiyo
 /homeassistant/components/nyt_games/ @joostlek
@@ -1144,8 +1146,8 @@ build.json @home-assistant/supervisor
 /tests/components/philips_js/ @elupus
 /homeassistant/components/pi_hole/ @shenxn
 /tests/components/pi_hole/ @shenxn
-/homeassistant/components/picnic/ @corneyl
-/tests/components/picnic/ @corneyl
+/homeassistant/components/picnic/ @corneyl @codesalatdev
+/tests/components/picnic/ @corneyl @codesalatdev
 /homeassistant/components/ping/ @jpbede
 /tests/components/ping/ @jpbede
 /homeassistant/components/plaato/ @JohNan
@@ -1399,6 +1401,8 @@ build.json @home-assistant/supervisor
 /tests/components/smappee/ @bsmappee
 /homeassistant/components/smart_meter_texas/ @grahamwetzler
 /tests/components/smart_meter_texas/ @grahamwetzler
+/homeassistant/components/smartthings/ @joostlek
+/tests/components/smartthings/ @joostlek
 /homeassistant/components/smarttub/ @mdz
 /tests/components/smarttub/ @mdz
 /homeassistant/components/smarty/ @z0mbieprocess
@@ -1413,6 +1417,8 @@ build.json @home-assistant/supervisor
 /tests/components/snapcast/ @luar123
 /homeassistant/components/snmp/ @nmaggioni
 /tests/components/snmp/ @nmaggioni
+/homeassistant/components/snoo/ @Lash-L
+/tests/components/snoo/ @Lash-L
 /homeassistant/components/snooz/ @AustinBrunkhorst
 /tests/components/snooz/ @AustinBrunkhorst
 /homeassistant/components/solaredge/ @frenck @bdraco
@@ -1693,6 +1699,8 @@ build.json @home-assistant/supervisor
 /tests/components/weatherflow_cloud/ @jeeftor
 /homeassistant/components/weatherkit/ @tjhorner
 /tests/components/weatherkit/ @tjhorner
+/homeassistant/components/webdav/ @jpbede
+/tests/components/webdav/ @jpbede
 /homeassistant/components/webhook/ @home-assistant/core
 /tests/components/webhook/ @home-assistant/core
 /homeassistant/components/webmin/ @autinerd
homeassistant/bootstrap.py

@@ -74,6 +74,7 @@ from .core_config import async_process_ha_core_config
 from .exceptions import HomeAssistantError
 from .helpers import (
     area_registry,
+    backup,
     category_registry,
     config_validation as cv,
     device_registry,
@@ -163,16 +164,6 @@ FRONTEND_INTEGRATIONS = {
     # integrations can be removed and database migration status is
     # visible in frontend
     "frontend",
-    # Hassio is an after dependency of backup, after dependencies
-    # are not promoted from stage 2 to earlier stages, so we need to
-    # add it here. Hassio needs to be setup before backup, otherwise
-    # the backup integration will think we are a container/core install
-    # when using HAOS or Supervised install.
-    "hassio",
-    # Backup is an after dependency of frontend, after dependencies
-    # are not promoted from stage 2 to earlier stages, so we need to
-    # add it here.
-    "backup",
 }
 # Stage 0 is divided into substages. Each substage has a name, a set of integrations and a timeout.
 # The substage containing recorder should have no timeout, as it could cancel a database migration.
@@ -206,6 +197,8 @@ STAGE_1_INTEGRATIONS = {
     "mqtt_eventstream",
     # To provide account link implementations
     "cloud",
+    # Ensure supervisor is available
+    "hassio",
 }

 DEFAULT_INTEGRATIONS = {
@@ -328,10 +321,10 @@ async def async_setup_hass(

     block_async_io.enable()

-    config_dict = None
-    basic_setup_success = False
-
     if not (recovery_mode := runtime_config.recovery_mode):
+        config_dict = None
+        basic_setup_success = False
+
         await hass.async_add_executor_job(conf_util.process_ha_config_upgrade, hass)

         try:
@@ -349,39 +342,43 @@ async def async_setup_hass(
             await async_from_config_dict(config_dict, hass) is not None
         )

-    if config_dict is None:
-        recovery_mode = True
-        await stop_hass(hass)
-        hass = await create_hass()
-
-    elif not basic_setup_success:
-        _LOGGER.warning("Unable to set up core integrations. Activating recovery mode")
-        recovery_mode = True
-        await stop_hass(hass)
-        hass = await create_hass()
-
-    elif any(domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS):
-        _LOGGER.warning(
-            "Detected that %s did not load. Activating recovery mode",
-            ",".join(CRITICAL_INTEGRATIONS),
-        )
-
-        old_config = hass.config
-        old_logging = hass.data.get(DATA_LOGGING)
-
-        recovery_mode = True
-        await stop_hass(hass)
-        hass = await create_hass()
-
-        if old_logging:
-            hass.data[DATA_LOGGING] = old_logging
-        hass.config.debug = old_config.debug
-        hass.config.skip_pip = old_config.skip_pip
-        hass.config.skip_pip_packages = old_config.skip_pip_packages
-        hass.config.internal_url = old_config.internal_url
-        hass.config.external_url = old_config.external_url
-        # Setup loader cache after the config dir has been set
-        loader.async_setup(hass)
+        if config_dict is None:
+            recovery_mode = True
+            await stop_hass(hass)
+            hass = await create_hass()
+
+        elif not basic_setup_success:
+            _LOGGER.warning(
+                "Unable to set up core integrations. Activating recovery mode"
+            )
+            recovery_mode = True
+            await stop_hass(hass)
+            hass = await create_hass()
+
+        elif any(
+            domain not in hass.config.components for domain in CRITICAL_INTEGRATIONS
+        ):
+            _LOGGER.warning(
+                "Detected that %s did not load. Activating recovery mode",
+                ",".join(CRITICAL_INTEGRATIONS),
+            )
+
+            old_config = hass.config
+            old_logging = hass.data.get(DATA_LOGGING)
+
+            recovery_mode = True
+            await stop_hass(hass)
+            hass = await create_hass()
+
+            if old_logging:
+                hass.data[DATA_LOGGING] = old_logging
+            hass.config.debug = old_config.debug
+            hass.config.skip_pip = old_config.skip_pip
+            hass.config.skip_pip_packages = old_config.skip_pip_packages
+            hass.config.internal_url = old_config.internal_url
+            hass.config.external_url = old_config.external_url
+            # Setup loader cache after the config dir has been set
+            loader.async_setup(hass)

     if recovery_mode:
         _LOGGER.info("Starting in recovery mode")
@@ -901,6 +898,10 @@ async def _async_set_up_integrations(
     if "recorder" in domains_to_setup:
         recorder.async_initialize_recorder(hass)

+    # Initialize backup
+    if "backup" in domains_to_setup:
+        backup.async_initialize_backup(hass)
+
     stage_0_and_1_domains: list[tuple[str, set[str], int | None]] = [
         *(
             (name, domain_group & domains_to_setup, timeout)
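The new backup hunk follows the same gating pattern already used for the recorder just above it: a subsystem that must be wired up before integrations start loading gets its initializer called only when its domain is actually part of the setup plan. A minimal standalone sketch of that pattern; the names here are hypothetical stand-ins, not the real bootstrap API:

```python
from collections.abc import Callable

# Hypothetical stand-ins for recorder.async_initialize_recorder and
# backup.async_initialize_backup.
PRE_SETUP_INITIALIZERS: dict[str, Callable[[], None]] = {
    "recorder": lambda: print("recorder: install session factory"),
    "backup": lambda: print("backup: register base backup platform"),
}


def run_pre_setup(domains_to_setup: set[str]) -> None:
    # Only initialize subsystems whose domain will actually be set up.
    for domain, initialize in PRE_SETUP_INITIALIZERS.items():
        if domain in domains_to_setup:
            initialize()


run_pre_setup({"backup", "light"})  # fires only the backup initializer
```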
homeassistant/brands/microsoft.json

@@ -6,6 +6,7 @@
     "azure_devops",
     "azure_event_hub",
     "azure_service_bus",
+    "azure_storage",
     "microsoft_face_detect",
     "microsoft_face_identify",
     "microsoft_face",
homeassistant/components/accuweather/manifest.json

@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.0.0"],
+  "requirements": ["accuweather==4.1.0"],
   "single_config_entry": true
 }
homeassistant/components/alarmdecoder/manifest.json

@@ -7,5 +7,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["adext", "alarmdecoder"],
-  "requirements": ["adext==0.4.3"]
+  "requirements": ["adext==0.4.4"]
 }
homeassistant/components/alert/entity.py

@@ -14,7 +14,7 @@ from homeassistant.components.notify import (
 )
 from homeassistant.const import STATE_IDLE, STATE_OFF, STATE_ON
 from homeassistant.core import Event, EventStateChangedData, HassJob, HomeAssistant
-from homeassistant.exceptions import ServiceNotFound
+from homeassistant.exceptions import ServiceNotFound, ServiceValidationError
 from homeassistant.helpers.entity import Entity
 from homeassistant.helpers.event import (
     async_track_point_in_time,
@@ -195,7 +195,8 @@ class AlertEntity(Entity):

     async def async_turn_off(self, **kwargs: Any) -> None:
         """Async Acknowledge alert."""
-        LOGGER.debug("Acknowledged Alert: %s", self._attr_name)
+        if not self._can_ack:
+            raise ServiceValidationError("This alert cannot be acknowledged")
         self._ack = True
         self.async_write_ha_state()
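This change swaps a silent no-op for an explicit failure: ServiceValidationError is the Home Assistant exception class for user-facing validation errors, so calling alert.turn_off on a non-acknowledgeable alert now fails the service call with a readable message instead of quietly flipping state. A minimal sketch of the pattern outside an entity; the Alert class here is a hypothetical stand-in, and running it requires the homeassistant package:

```python
from homeassistant.exceptions import ServiceValidationError


class Alert:
    """Hypothetical stand-in for AlertEntity, reduced to the acknowledge path."""

    def __init__(self, can_ack: bool) -> None:
        self._can_ack = can_ack
        self._ack = False

    def acknowledge(self) -> None:
        # Reject the request up front instead of silently "acknowledging".
        if not self._can_ack:
            raise ServiceValidationError("This alert cannot be acknowledged")
        self._ack = True
```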
homeassistant/components/androidtv_remote/manifest.json

@@ -7,6 +7,6 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["androidtvremote2"],
-  "requirements": ["androidtvremote2==0.1.2"],
+  "requirements": ["androidtvremote2==0.2.0"],
   "zeroconf": ["_androidtvremote2._tcp.local."]
 }
homeassistant/components/anthropic/__init__.py

@@ -2,6 +2,8 @@

 from __future__ import annotations

+from functools import partial
+
 import anthropic

 from homeassistant.config_entries import ConfigEntry
@@ -10,7 +12,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import config_validation as cv

-from .const import DOMAIN, LOGGER
+from .const import CONF_CHAT_MODEL, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL

 PLATFORMS = (Platform.CONVERSATION,)
 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -20,14 +22,13 @@ type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]

 async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
     """Set up Anthropic from a config entry."""
-    client = anthropic.AsyncAnthropic(api_key=entry.data[CONF_API_KEY])
+    client = await hass.async_add_executor_job(
+        partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
+    )
     try:
-        await client.messages.create(
-            model="claude-3-haiku-20240307",
-            max_tokens=1,
-            messages=[{"role": "user", "content": "Hi"}],
-            timeout=10.0,
-        )
+        model_id = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
+        model = await client.models.retrieve(model_id=model_id, timeout=10.0)
+        LOGGER.debug("Anthropic model: %s", model.display_name)
     except anthropic.AuthenticationError as err:
         LOGGER.error("Invalid API key: %s", err)
         return False
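Two separate improvements land in this hunk. First, `anthropic.AsyncAnthropic(...)` is now constructed via `hass.async_add_executor_job`, because the SDK client can do blocking I/O in its constructor, which must not run on the event loop. Second, the startup check switches from a billable one-token `messages.create` probe to a metadata-only `models.retrieve` call that also logs the configured model. A standalone sketch of the executor pattern, assuming only asyncio and the anthropic SDK; the model id is the one from the old probe, used purely as a placeholder:

```python
import asyncio
from functools import partial

import anthropic


async def make_client(api_key: str) -> anthropic.AsyncAnthropic:
    # Build the client in a worker thread so any blocking work in __init__
    # (environment/certificate loading) cannot stall the event loop.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(
        None, partial(anthropic.AsyncAnthropic, api_key=api_key)
    )


async def main() -> None:
    client = await make_client("sk-...")  # placeholder key
    # Metadata lookup: validates key and model without token charges.
    model = await client.models.retrieve(
        model_id="claude-3-haiku-20240307", timeout=10.0
    )
    print(model.display_name)
```

The config_flow.py hunk that follows applies the same executor trick and validates credentials with `client.models.list(timeout=10.0)`, which is likewise free of token charges.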
homeassistant/components/anthropic/config_flow.py

@@ -2,6 +2,7 @@

 from __future__ import annotations

+from functools import partial
 import logging
 from types import MappingProxyType
 from typing import Any
@@ -59,13 +60,10 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:

     Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
     """
-    client = anthropic.AsyncAnthropic(api_key=data[CONF_API_KEY])
-    await client.messages.create(
-        model="claude-3-haiku-20240307",
-        max_tokens=1,
-        messages=[{"role": "user", "content": "Hi"}],
-        timeout=10.0,
-    )
+    client = await hass.async_add_executor_job(
+        partial(anthropic.AsyncAnthropic, api_key=data[CONF_API_KEY])
+    )
+    await client.models.list(timeout=10.0)


 class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
homeassistant/components/anthropic/manifest.json

@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/anthropic",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["anthropic==0.44.0"]
+  "requirements": ["anthropic==0.47.2"]
 }
homeassistant/components/apple_tv/__init__.py

@@ -233,7 +233,6 @@ class AppleTVManager(DeviceListener):
             pass
         except Exception:
             _LOGGER.exception("Failed to connect")
-            await self.disconnect()

     async def _connect_loop(self) -> None:
         """Connect loop background task function."""
homeassistant/components/assist_pipeline/__init__.py

@@ -117,7 +117,7 @@ async def async_pipeline_from_audio_stream(
     """
     with chat_session.async_get_chat_session(hass, conversation_id) as session:
         pipeline_input = PipelineInput(
-            conversation_id=session.conversation_id,
+            session=session,
             device_id=device_id,
             stt_metadata=stt_metadata,
             stt_stream=stt_stream,
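PipelineInput now receives the whole ChatSession rather than a bare conversation-id string: the session object owns the id and the conversation's lifetime, which is what lets the pipeline attach follow-up state (see the continue_conversation hunks in pipeline.py below). A standalone sketch of the shape of that API; ChatSession and get_chat_session here are simplified stand-ins, not homeassistant.helpers.chat_session itself:

```python
from contextlib import contextmanager
from dataclasses import dataclass, field
from uuid import uuid4


@dataclass
class ChatSession:
    """Stand-in: owns the conversation id and any per-conversation state."""

    conversation_id: str = field(default_factory=lambda: uuid4().hex)


@contextmanager
def get_chat_session(conversation_id: str | None):
    # Reuse the caller's conversation id or mint a new one for this session.
    yield ChatSession(conversation_id or uuid4().hex)


with get_chat_session(None) as session:
    # Before: PipelineInput(conversation_id=session.conversation_id, ...)
    # After:  PipelineInput(session=session, ...)
    pipeline_input = {"session": session}
```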
homeassistant/components/assist_pipeline/pipeline.py

@@ -19,14 +19,7 @@ import wave

 import hass_nabucasa
 import voluptuous as vol

-from homeassistant.components import (
-    conversation,
-    media_source,
-    stt,
-    tts,
-    wake_word,
-    websocket_api,
-)
+from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
 from homeassistant.components.tts import (
     generate_media_source_id as tts_generate_media_source_id,
 )
@@ -96,6 +89,9 @@ ENGINE_LANGUAGE_PAIRS = (
 )

 KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)
+KEY_PIPELINE_CONVERSATION_DATA: HassKey[dict[str, PipelineConversationData]] = HassKey(
+    "pipeline_conversation_data"
+)


 def validate_language(data: dict[str, Any]) -> Any:
@@ -566,8 +562,7 @@ class PipelineRun:

     id: str = field(default_factory=ulid_util.ulid_now)
     stt_provider: stt.SpeechToTextEntity | stt.Provider = field(init=False, repr=False)
-    tts_engine: str = field(init=False, repr=False)
-    tts_options: dict | None = field(init=False, default=None)
+    tts_stream: tts.ResultStream | None = field(init=False, default=None)
     wake_word_entity_id: str | None = field(init=False, default=None, repr=False)
     wake_word_entity: wake_word.WakeWordDetectionEntity = field(init=False, repr=False)
@@ -590,6 +585,12 @@ class PipelineRun:
     _device_id: str | None = None
     """Optional device id set during run start."""

+    _conversation_data: PipelineConversationData | None = None
+    """Data tied to the conversation ID."""
+
+    _intent_agent_only = False
+    """If request should only be handled by agent, ignoring sentence triggers and local processing."""
+
     def __post_init__(self) -> None:
         """Set language for pipeline."""
         self.language = self.pipeline.language or self.hass.config.language
@@ -639,13 +640,18 @@ class PipelineRun:
         self._device_id = device_id
         self._start_debug_recording_thread()

-        data = {
+        data: dict[str, Any] = {
             "pipeline": self.pipeline.id,
             "language": self.language,
             "conversation_id": conversation_id,
         }
         if self.runner_data is not None:
             data["runner_data"] = self.runner_data
+        if self.tts_stream:
+            data["tts_output"] = {
+                "url": self.tts_stream.url,
+                "mime_type": self.tts_stream.content_type,
+            }

         self.process_event(PipelineEvent(PipelineEventType.RUN_START, data))
@@ -1007,19 +1013,36 @@ class PipelineRun:

         yield chunk.audio

-    async def prepare_recognize_intent(self) -> None:
-        """Prepare recognizing an intent."""
-        agent_info = conversation.async_get_agent_info(
-            self.hass,
-            self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
-        )
-
-        if agent_info is None:
-            engine = self.pipeline.conversation_engine or "default"
-            raise IntentRecognitionError(
-                code="intent-not-supported",
-                message=f"Intent recognition engine {engine} is not found",
-            )
+    async def prepare_recognize_intent(self, session: chat_session.ChatSession) -> None:
+        """Prepare recognizing an intent."""
+        self._conversation_data = async_get_pipeline_conversation_data(
+            self.hass, session
+        )
+
+        if self._conversation_data.continue_conversation_agent is not None:
+            agent_info = conversation.async_get_agent_info(
+                self.hass, self._conversation_data.continue_conversation_agent
+            )
+            self._conversation_data.continue_conversation_agent = None
+            if agent_info is None:
+                raise IntentRecognitionError(
+                    code="intent-agent-not-found",
+                    message=f"Intent recognition engine {self._conversation_data.continue_conversation_agent} asked for follow-up but is no longer found",
+                )
+            self._intent_agent_only = True
+
+        else:
+            agent_info = conversation.async_get_agent_info(
+                self.hass,
+                self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
+            )
+
+            if agent_info is None:
+                engine = self.pipeline.conversation_engine or "default"
+                raise IntentRecognitionError(
+                    code="intent-not-supported",
+                    message=f"Intent recognition engine {engine} is not found",
+                )

         self.intent_agent = agent_info.id
@@ -1031,7 +1054,7 @@ class PipelineRun:
         conversation_extra_system_prompt: str | None,
     ) -> str:
         """Run intent recognition portion of pipeline. Returns text to speak."""
-        if self.intent_agent is None:
+        if self.intent_agent is None or self._conversation_data is None:
             raise RuntimeError("Recognize intent was not prepared")

         if self.pipeline.conversation_language == MATCH_ALL:
@@ -1078,7 +1101,7 @@ class PipelineRun:
         agent_id = self.intent_agent
         processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
         intent_response: intent.IntentResponse | None = None
-        if not processed_locally:
+        if not processed_locally and not self._intent_agent_only:
             # Sentence triggers override conversation agent
             if (
                 trigger_response_text
@@ -1103,12 +1126,16 @@ class PipelineRun:
             ) & conversation.ConversationEntityFeature.CONTROL:
                 intent_filter = _async_local_fallback_intent_filter

-            # Try local intents first, if preferred.
-            elif self.pipeline.prefer_local_intents and (
-                intent_response := await conversation.async_handle_intents(
-                    self.hass,
-                    user_input,
-                    intent_filter=intent_filter,
-                )
-            ):
-                # Local intent matched
+        # Try local intents
+        if (
+            intent_response is None
+            and self.pipeline.prefer_local_intents
+            and (
+                intent_response := await conversation.async_handle_intents(
+                    self.hass,
+                    user_input,
+                    intent_filter=intent_filter,
+                )
+            )
+        ):
+            # Local intent matched
@@ -1191,6 +1218,9 @@ class PipelineRun:
                 )
             )

+        if conversation_result.continue_conversation:
+            self._conversation_data.continue_conversation_agent = agent_id
+
         return speech

     async def prepare_text_to_speech(self) -> None:
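Taken together, these hunks implement conversation follow-ups: when an agent's result sets continue_conversation, its id is parked on the per-conversation data, and the next pipeline run for that session is routed straight back to that agent with sentence triggers and local-intent fallbacks disabled (_intent_agent_only). A minimal standalone sketch of that handoff; the types here are hypothetical stand-ins for the pipeline internals:

```python
from dataclasses import dataclass


@dataclass
class ConversationData:
    """Stand-in for PipelineConversationData."""

    continue_conversation_agent: str | None = None


def record_followup(continue_conversation: bool, agent_id: str, data: ConversationData) -> None:
    # After intent recognition: park the agent id if it asked for a follow-up.
    if continue_conversation:
        data.continue_conversation_agent = agent_id


def pick_agent(data: ConversationData, default_agent: str) -> tuple[str, bool]:
    # Before the next run: route back to the parked agent, exactly once.
    if (agent := data.continue_conversation_agent) is not None:
        data.continue_conversation_agent = None
        return agent, True  # agent-only: skip triggers and local intents
    return default_agent, False
```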
@@ -1213,36 +1243,31 @@ class PipelineRun:
             tts_options[tts.ATTR_PREFERRED_SAMPLE_BYTES] = SAMPLE_WIDTH

         try:
-            options_supported = await tts.async_support_options(
-                self.hass,
-                engine,
-                self.pipeline.tts_language,
-                tts_options,
+            self.tts_stream = tts.async_create_stream(
+                hass=self.hass,
+                engine=engine,
+                language=self.pipeline.tts_language,
+                options=tts_options,
             )
         except HomeAssistantError as err:
             raise TextToSpeechError(
                 code="tts-not-supported",
-                message=f"Text-to-speech engine '{engine}' not found",
-            ) from err
-        if not options_supported:
-            raise TextToSpeechError(
-                code="tts-not-supported",
                 message=(
                     f"Text-to-speech engine {engine} "
-                    f"does not support language {self.pipeline.tts_language} or options {tts_options}"
+                    f"does not support language {self.pipeline.tts_language} or options {tts_options}:"
+                    f" {err}"
                 ),
-            )
-
-        self.tts_engine = engine
-        self.tts_options = tts_options
+            ) from err

     async def text_to_speech(self, tts_input: str) -> None:
         """Run text-to-speech portion of pipeline."""
+        assert self.tts_stream is not None
+
         self.process_event(
             PipelineEvent(
                 PipelineEventType.TTS_START,
                 {
-                    "engine": self.tts_engine,
+                    "engine": self.tts_stream.engine,
                     "language": self.pipeline.tts_language,
                     "voice": self.pipeline.tts_voice,
                     "tts_input": tts_input,
@@ -1255,14 +1280,9 @@ class PipelineRun:
             tts_media_id = tts_generate_media_source_id(
                 self.hass,
                 tts_input,
-                engine=self.tts_engine,
-                language=self.pipeline.tts_language,
-                options=self.tts_options,
-            )
-            tts_media = await media_source.async_resolve_media(
-                self.hass,
-                tts_media_id,
-                None,
+                engine=self.tts_stream.engine,
+                language=self.tts_stream.language,
+                options=self.tts_stream.options,
             )
         except Exception as src_error:
             _LOGGER.exception("Unexpected error during text-to-speech")
@@ -1271,10 +1291,12 @@ class PipelineRun:
                 message="Unexpected error during text-to-speech",
             ) from src_error

-        _LOGGER.debug("TTS result %s", tts_media)
+        self.tts_stream.async_set_message(tts_input)

         tts_output = {
-            "media_id": tts_media_id,
-            **asdict(tts_media),
+            "url": self.tts_stream.url,
+            "mime_type": self.tts_stream.content_type,
         }

         self.process_event(
@@ -1454,8 +1476,8 @@ class PipelineInput:
|
||||
|
||||
run: PipelineRun
|
||||
|
||||
conversation_id: str
|
||||
"""Identifier for the conversation."""
|
||||
session: chat_session.ChatSession
|
||||
"""Session for the conversation."""
|
||||
|
||||
stt_metadata: stt.SpeechMetadata | None = None
|
||||
"""Metadata of stt input audio. Required when start_stage = stt."""
|
||||
@@ -1480,7 +1502,9 @@ class PipelineInput:
|
||||
|
||||
async def execute(self) -> None:
|
||||
"""Run pipeline."""
|
||||
self.run.start(conversation_id=self.conversation_id, device_id=self.device_id)
|
||||
self.run.start(
|
||||
conversation_id=self.session.conversation_id, device_id=self.device_id
|
||||
)
|
||||
current_stage: PipelineStage | None = self.run.start_stage
|
||||
stt_audio_buffer: list[EnhancedAudioChunk] = []
|
||||
stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None
|
||||
@@ -1564,7 +1588,7 @@ class PipelineInput:
|
||||
assert intent_input is not None
|
||||
tts_input = await self.run.recognize_intent(
|
||||
intent_input,
|
||||
self.conversation_id,
|
||||
self.session.conversation_id,
|
||||
self.device_id,
|
||||
self.conversation_extra_system_prompt,
|
||||
)
|
||||
@@ -1648,7 +1672,7 @@ class PipelineInput:
|
||||
<= PIPELINE_STAGE_ORDER.index(PipelineStage.INTENT)
|
||||
<= end_stage_index
|
||||
):
|
||||
prepare_tasks.append(self.run.prepare_recognize_intent())
|
||||
prepare_tasks.append(self.run.prepare_recognize_intent(self.session))
|
||||
|
||||
if (
|
||||
start_stage_index
|
||||
@@ -1927,7 +1951,7 @@ class PipelineRunDebug:
|
||||
|
||||
|
||||
class PipelineStore(Store[SerializedPipelineStorageCollection]):
|
||||
"""Store entity registry data."""
|
||||
"""Store pipeline data."""
|
||||
|
||||
async def _async_migrate_func(
|
||||
self,
|
||||
@@ -2009,3 +2033,37 @@ async def async_run_migrations(hass: HomeAssistant) -> None:
|
||||
|
||||
for pipeline, attr_updates in updates:
|
||||
await async_update_pipeline(hass, pipeline, **attr_updates)
|
||||
|
||||
|
||||
@dataclass
|
||||
class PipelineConversationData:
|
||||
"""Hold data for the duration of a conversation."""
|
||||
|
||||
continue_conversation_agent: str | None = None
|
||||
"""The agent that requested the conversation to be continued."""
|
||||
|
||||
|
||||
@callback
|
||||
def async_get_pipeline_conversation_data(
|
||||
hass: HomeAssistant, session: chat_session.ChatSession
|
||||
) -> PipelineConversationData:
|
||||
"""Get the pipeline data for a specific conversation."""
|
||||
all_conversation_data = hass.data.get(KEY_PIPELINE_CONVERSATION_DATA)
|
||||
if all_conversation_data is None:
|
||||
all_conversation_data = {}
|
||||
hass.data[KEY_PIPELINE_CONVERSATION_DATA] = all_conversation_data
|
||||
|
||||
data = all_conversation_data.get(session.conversation_id)
|
||||
|
||||
if data is not None:
|
||||
return data
|
||||
|
||||
@callback
|
||||
def do_cleanup() -> None:
|
||||
"""Handle cleanup."""
|
||||
all_conversation_data.pop(session.conversation_id)
|
||||
|
||||
session.async_on_cleanup(do_cleanup)
|
||||
|
||||
data = all_conversation_data[session.conversation_id] = PipelineConversationData()
|
||||
return data
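
For context, a minimal usage sketch of the session-scoped helper above (illustrative only, not part of this diff; the agent id is a hypothetical placeholder):

    from homeassistant.helpers import chat_session

    def remember_agent(hass, conversation_id: str | None) -> None:
        # Data lives exactly as long as the chat session; do_cleanup drops it.
        with chat_session.async_get_chat_session(hass, conversation_id) as session:
            data = async_get_pipeline_conversation_data(hass, session)
            data.continue_conversation_agent = "conversation.home_assistant"  # hypothetical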

@@ -239,7 +239,7 @@ async def websocket_run(
     with chat_session.async_get_chat_session(
         hass, msg.get("conversation_id")
     ) as session:
-        input_args["conversation_id"] = session.conversation_id
+        input_args["session"] = session
         pipeline_input = PipelineInput(**input_args)

         try:

homeassistant/components/azure_storage/__init__.py (new file, 86 lines)
@@ -0,0 +1,86 @@
"""The Azure Storage integration."""

from aiohttp import ClientTimeout
from azure.core.exceptions import (
    ClientAuthenticationError,
    HttpResponseError,
    ResourceNotFoundError,
)
from azure.core.pipeline.transport._aiohttp import (
    AioHttpTransport,
)  # need to import from the private module, as it is not re-exported from the package init
from azure.storage.blob.aio import ContainerClient

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import (
    ConfigEntryAuthFailed,
    ConfigEntryError,
    ConfigEntryNotReady,
)
from homeassistant.helpers.aiohttp_client import async_create_clientsession

from .const import (
    CONF_ACCOUNT_NAME,
    CONF_CONTAINER_NAME,
    CONF_STORAGE_ACCOUNT_KEY,
    DATA_BACKUP_AGENT_LISTENERS,
    DOMAIN,
)

type AzureStorageConfigEntry = ConfigEntry[ContainerClient]


async def async_setup_entry(
    hass: HomeAssistant, entry: AzureStorageConfigEntry
) -> bool:
    """Set up Azure Storage integration."""
    # Increase the aiohttp timeout for long-running operations (up-/download)
    session = async_create_clientsession(
        hass, timeout=ClientTimeout(connect=10, total=12 * 60 * 60)
    )
    container_client = ContainerClient(
        account_url=f"https://{entry.data[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
        container_name=entry.data[CONF_CONTAINER_NAME],
        credential=entry.data[CONF_STORAGE_ACCOUNT_KEY],
        transport=AioHttpTransport(session=session),
    )

    try:
        if not await container_client.exists():
            await container_client.create_container()
    except ResourceNotFoundError as err:
        raise ConfigEntryError(
            translation_domain=DOMAIN,
            translation_key="account_not_found",
            translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
        ) from err
    except ClientAuthenticationError as err:
        raise ConfigEntryAuthFailed(
            translation_domain=DOMAIN,
            translation_key="invalid_auth",
            translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
        ) from err
    except HttpResponseError as err:
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
            translation_key="cannot_connect",
            translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
        ) from err

    entry.runtime_data = container_client

    def _async_notify_backup_listeners() -> None:
        for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
            listener()

    entry.async_on_unload(entry.async_on_state_change(_async_notify_backup_listeners))

    return True


async def async_unload_entry(
    hass: HomeAssistant, entry: AzureStorageConfigEntry
) -> bool:
    """Unload an Azure Storage config entry."""
    return True
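
Design note on the setup above: _async_notify_backup_listeners is wired to entry.async_on_state_change, so every load or unload of this entry pings the callables registered under DATA_BACKUP_AGENT_LISTENERS (see backup.py below) and the Backup integration re-queries its agents.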

homeassistant/components/azure_storage/backup.py (new file, 182 lines)
@@ -0,0 +1,182 @@
"""Support for Azure Storage backup."""

from __future__ import annotations

from collections.abc import AsyncIterator, Callable, Coroutine
from functools import wraps
import json
import logging
from typing import Any, Concatenate

from azure.core.exceptions import HttpResponseError
from azure.storage.blob import BlobProperties

from homeassistant.components.backup import (
    AgentBackup,
    BackupAgent,
    BackupAgentError,
    BackupNotFound,
    suggested_filename,
)
from homeassistant.core import HomeAssistant, callback

from . import AzureStorageConfigEntry
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN

_LOGGER = logging.getLogger(__name__)
METADATA_VERSION = "1"


async def async_get_backup_agents(
    hass: HomeAssistant,
) -> list[BackupAgent]:
    """Return a list of backup agents."""
    entries: list[AzureStorageConfigEntry] = hass.config_entries.async_loaded_entries(
        DOMAIN
    )
    return [AzureStorageBackupAgent(hass, entry) for entry in entries]


@callback
def async_register_backup_agents_listener(
    hass: HomeAssistant,
    *,
    listener: Callable[[], None],
    **kwargs: Any,
) -> Callable[[], None]:
    """Register a listener to be called when agents are added or removed."""
    hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, []).append(listener)

    @callback
    def remove_listener() -> None:
        """Remove the listener."""
        hass.data[DATA_BACKUP_AGENT_LISTENERS].remove(listener)
        if not hass.data[DATA_BACKUP_AGENT_LISTENERS]:
            hass.data.pop(DATA_BACKUP_AGENT_LISTENERS)

    return remove_listener


def handle_backup_errors[_R, **P](
    func: Callable[Concatenate[AzureStorageBackupAgent, P], Coroutine[Any, Any, _R]],
) -> Callable[Concatenate[AzureStorageBackupAgent, P], Coroutine[Any, Any, _R]]:
    """Handle backup errors."""

    @wraps(func)
    async def wrapper(
        self: AzureStorageBackupAgent, *args: P.args, **kwargs: P.kwargs
    ) -> _R:
        try:
            return await func(self, *args, **kwargs)
        except HttpResponseError as err:
            _LOGGER.debug(
                "Error during backup in %s: Status %s, message %s",
                func.__name__,
                err.status_code,
                err.message,
                exc_info=True,
            )
            raise BackupAgentError(
                f"Error during backup operation in {func.__name__}:"
                f" Status {err.status_code}, message: {err.message}"
            ) from err

    return wrapper


class AzureStorageBackupAgent(BackupAgent):
    """Azure storage backup agent."""

    domain = DOMAIN

    def __init__(self, hass: HomeAssistant, entry: AzureStorageConfigEntry) -> None:
        """Initialize the Azure storage backup agent."""
        super().__init__()
        self._client = entry.runtime_data
        self.name = entry.title
        self.unique_id = entry.entry_id

    @handle_backup_errors
    async def async_download_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AsyncIterator[bytes]:
        """Download a backup file."""
        blob = await self._find_blob_by_backup_id(backup_id)
        if blob is None:
            raise BackupNotFound(f"Backup {backup_id} not found")
        download_stream = await self._client.download_blob(blob.name)
        return download_stream.chunks()

    @handle_backup_errors
    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Upload a backup."""

        metadata = {
            "metadata_version": METADATA_VERSION,
            "backup_id": backup.backup_id,
            "backup_metadata": json.dumps(backup.as_dict()),
        }

        await self._client.upload_blob(
            name=suggested_filename(backup),
            metadata=metadata,
            data=await open_stream(),
            length=backup.size,
        )

    @handle_backup_errors
    async def async_delete_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> None:
        """Delete a backup file."""
        blob = await self._find_blob_by_backup_id(backup_id)
        if blob is None:
            return
        await self._client.delete_blob(blob.name)

    @handle_backup_errors
    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups."""
        backups: list[AgentBackup] = []
        async for blob in self._client.list_blobs(include="metadata"):
            metadata = blob.metadata

            if metadata.get("metadata_version") == METADATA_VERSION:
                backups.append(
                    AgentBackup.from_dict(json.loads(metadata["backup_metadata"]))
                )

        return backups

    @handle_backup_errors
    async def async_get_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup | None:
        """Return a backup."""
        blob = await self._find_blob_by_backup_id(backup_id)
        if blob is None:
            return None

        return AgentBackup.from_dict(json.loads(blob.metadata["backup_metadata"]))

    async def _find_blob_by_backup_id(self, backup_id: str) -> BlobProperties | None:
        """Find a blob by backup id."""
        async for blob in self._client.list_blobs(include="metadata"):
            if (
                backup_id == blob.metadata.get("backup_id", "")
                and blob.metadata.get("metadata_version") == METADATA_VERSION
            ):
                return blob
        return None
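
For reference, a sketch of the blob metadata layout written by async_upload_backup and matched by _find_blob_by_backup_id above (values hypothetical):

    metadata = {
        "metadata_version": "1",        # METADATA_VERSION
        "backup_id": "abc123",          # hypothetical backup id
        "backup_metadata": "{...}",     # json.dumps(AgentBackup.as_dict())
    }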

homeassistant/components/azure_storage/config_flow.py (new file, 160 lines)
@@ -0,0 +1,160 @@
"""Config flow for Azure Storage integration."""

from collections.abc import Mapping
import logging
from typing import Any

from azure.core.exceptions import ClientAuthenticationError, ResourceNotFoundError
from azure.core.pipeline.transport._aiohttp import (
    AioHttpTransport,
)  # need to import from the private module, as it is not re-exported from the package init
from azure.storage.blob.aio import ContainerClient
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import (
    CONF_ACCOUNT_NAME,
    CONF_CONTAINER_NAME,
    CONF_STORAGE_ACCOUNT_KEY,
    DOMAIN,
)

_LOGGER = logging.getLogger(__name__)


class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for azure storage."""

    def get_account_url(self, account_name: str) -> str:
        """Get the account URL."""
        return f"https://{account_name}.blob.core.windows.net/"

    async def validate_config(
        self, container_client: ContainerClient
    ) -> dict[str, str]:
        """Validate the configuration."""
        errors: dict[str, str] = {}
        try:
            await container_client.exists()
        except ResourceNotFoundError:
            errors["base"] = "cannot_connect"
        except ClientAuthenticationError:
            errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth"
        except Exception:
            _LOGGER.exception("Unknown exception occurred")
            errors["base"] = "unknown"
        return errors

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """User step for Azure Storage."""

        errors: dict[str, str] = {}

        if user_input is not None:
            self._async_abort_entries_match(
                {CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]}
            )
            container_client = ContainerClient(
                account_url=self.get_account_url(user_input[CONF_ACCOUNT_NAME]),
                container_name=user_input[CONF_CONTAINER_NAME],
                credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
                transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
            )
            errors = await self.validate_config(container_client)

            if not errors:
                return self.async_create_entry(
                    title=f"{user_input[CONF_ACCOUNT_NAME]}/{user_input[CONF_CONTAINER_NAME]}",
                    data=user_input,
                )

        return self.async_show_form(
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_ACCOUNT_NAME): str,
                    vol.Required(
                        CONF_CONTAINER_NAME, default="home-assistant-backups"
                    ): str,
                    vol.Required(CONF_STORAGE_ACCOUNT_KEY): str,
                }
            ),
            errors=errors,
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Perform reauth upon an API authentication error."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauth dialog."""
        errors: dict[str, str] = {}
        reauth_entry = self._get_reauth_entry()

        if user_input is not None:
            container_client = ContainerClient(
                account_url=self.get_account_url(reauth_entry.data[CONF_ACCOUNT_NAME]),
                container_name=reauth_entry.data[CONF_CONTAINER_NAME],
                credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
                transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
            )
            errors = await self.validate_config(container_client)
            if not errors:
                return self.async_update_reload_and_abort(
                    reauth_entry,
                    data={**reauth_entry.data, **user_input},
                )
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_STORAGE_ACCOUNT_KEY): str,
                }
            ),
            errors=errors,
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Reconfigure the entry."""
        errors: dict[str, str] = {}
        reconfigure_entry = self._get_reconfigure_entry()

        if user_input is not None:
            container_client = ContainerClient(
                account_url=self.get_account_url(
                    reconfigure_entry.data[CONF_ACCOUNT_NAME]
                ),
                container_name=user_input[CONF_CONTAINER_NAME],
                credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
                transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
            )
            errors = await self.validate_config(container_client)
            if not errors:
                return self.async_update_reload_and_abort(
                    reconfigure_entry,
                    data={**reconfigure_entry.data, **user_input},
                )
        return self.async_show_form(
            data_schema=vol.Schema(
                {
                    vol.Required(
                        CONF_CONTAINER_NAME,
                        default=reconfigure_entry.data[CONF_CONTAINER_NAME],
                    ): str,
                    vol.Required(
                        CONF_STORAGE_ACCOUNT_KEY,
                        default=reconfigure_entry.data[CONF_STORAGE_ACCOUNT_KEY],
                    ): str,
                }
            ),
            errors=errors,
        )
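
Design note: the user, reauth, and reconfigure steps all build a ContainerClient and funnel it through the shared validate_config helper, so authentication and connectivity failures map to the same error keys in every flow.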

homeassistant/components/azure_storage/const.py (new file, 16 lines)
@@ -0,0 +1,16 @@
"""Constants for the Azure Storage integration."""

from collections.abc import Callable
from typing import Final

from homeassistant.util.hass_dict import HassKey

DOMAIN: Final = "azure_storage"

CONF_STORAGE_ACCOUNT_KEY: Final = "storage_account_key"
CONF_ACCOUNT_NAME: Final = "account_name"
CONF_CONTAINER_NAME: Final = "container_name"

DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
    f"{DOMAIN}.backup_agent_listeners"
)
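
A brief note on the HassKey above (illustrative sketch, not part of this diff; assumes a HomeAssistant instance `hass` in scope): the key carries the value type, so hass.data lookups through it are statically typed:

    # mypy infers list[Callable[[], None]] here, no cast needed
    listeners = hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, [])
    listeners.append(lambda: None)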

homeassistant/components/azure_storage/manifest.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "domain": "azure_storage",
  "name": "Azure Storage",
  "codeowners": ["@zweckj"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/azure_storage",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "loggers": ["azure-storage-blob"],
  "quality_scale": "platinum",
  "requirements": ["azure-storage-blob==12.24.0"]
}

homeassistant/components/azure_storage/quality_scale.yaml (new file, 133 lines)
@@ -0,0 +1,133 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: Integration does not register custom actions.
  appropriate-polling:
    status: exempt
    comment: |
      This integration does not poll.
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      This integration does not have any custom actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities of this integration do not explicitly subscribe to events.
  entity-unique-id:
    status: exempt
    comment: |
      This integration does not have entities.
  has-entity-name:
    status: exempt
    comment: |
      This integration does not have entities.
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions: done
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: |
      This integration does not have any configuration parameters.
  docs-installation-parameters: done
  entity-unavailable:
    status: exempt
    comment: |
      This integration does not have entities.
  integration-owner: done
  log-when-unavailable:
    status: exempt
    comment: |
      This integration does not have entities.
  parallel-updates:
    status: exempt
    comment: |
      This integration does not have platforms.
  reauthentication-flow: done
  test-coverage: done

  # Gold
  devices:
    status: exempt
    comment: |
      This integration connects to a single service.
  diagnostics:
    status: exempt
    comment: |
      There is no data to diagnose.
  discovery-update-info:
    status: exempt
    comment: |
      This integration is a cloud service and does not support discovery.
  discovery:
    status: exempt
    comment: |
      This integration is a cloud service and does not support discovery.
  docs-data-update:
    status: exempt
    comment: |
      This integration does not poll or push.
  docs-examples:
    status: exempt
    comment: |
      This integration only serves backup.
  docs-known-limitations: done
  docs-supported-devices:
    status: exempt
    comment: |
      This integration is a cloud service.
  docs-supported-functions:
    status: exempt
    comment: |
      This integration does not have entities.
  docs-troubleshooting: done
  docs-use-cases: done
  dynamic-devices:
    status: exempt
    comment: |
      This integration connects to a single service.
  entity-category:
    status: exempt
    comment: |
      This integration does not have entities.
  entity-device-class:
    status: exempt
    comment: |
      This integration does not have entities.
  entity-disabled-by-default:
    status: exempt
    comment: |
      This integration does not have entities.
  entity-translations:
    status: exempt
    comment: |
      This integration does not have entities.
  exception-translations: done
  icon-translations:
    status: exempt
    comment: |
      This integration does not have entities.
  reconfiguration-flow: done
  repair-issues: done
  stale-devices:
    status: exempt
    comment: |
      This integration connects to a single service.

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: done

homeassistant/components/azure_storage/strings.json (new file, 72 lines)
@@ -0,0 +1,72 @@
{
  "config": {
    "error": {
      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "user": {
        "data": {
          "storage_account_key": "Storage account key",
          "account_name": "Account name",
          "container_name": "Container name"
        },
        "data_description": {
          "storage_account_key": "Storage account access key used for authorization",
          "account_name": "Name of the storage account",
          "container_name": "Name of the storage container to be used (will be created if it does not exist)"
        },
        "description": "Set up an Azure (Blob) storage account to be used for backups.",
        "title": "Add Azure storage account"
      },
      "reauth_confirm": {
        "data": {
          "storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]"
        },
        "data_description": {
          "storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]"
        },
        "description": "Provide a new storage account key.",
        "title": "Reauthenticate Azure storage account"
      },
      "reconfigure": {
        "data": {
          "container_name": "[%key:component::azure_storage::config::step::user::data::container_name%]",
          "storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]"
        },
        "data_description": {
          "container_name": "[%key:component::azure_storage::config::step::user::data_description::container_name%]",
          "storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]"
        },
        "description": "Change the settings of the Azure storage integration.",
        "title": "Reconfigure Azure storage account"
      }
    },
    "abort": {
      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
    }
  },
  "issues": {
    "container_not_found": {
      "title": "Storage container not found",
      "description": "The storage container {container_name} has not been found in the storage account. Please re-create it manually, then fix this issue."
    }
  },
  "exceptions": {
    "account_not_found": {
      "message": "Storage account {account_name} not found"
    },
    "cannot_connect": {
      "message": "Cannot connect to storage account {account_name}"
    },
    "invalid_auth": {
      "message": "Authentication failed for storage account {account_name}"
    },
    "container_not_found": {
      "message": "Storage container {container_name} not found"
    }
  }
}

@@ -1,8 +1,8 @@
 """The Backup integration."""

-from homeassistant.core import HomeAssistant, ServiceCall, callback
-from homeassistant.exceptions import HomeAssistantError
+from homeassistant.core import HomeAssistant, ServiceCall
 from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.backup import DATA_BACKUP
 from homeassistant.helpers.hassio import is_hassio
 from homeassistant.helpers.typing import ConfigType

@@ -32,6 +32,7 @@ from .manager import (
     IdleEvent,
     IncorrectPasswordError,
     ManagerBackup,
+    ManagerStateEvent,
     NewBackup,
     RestoreBackupEvent,
     RestoreBackupStage,
@@ -63,12 +64,12 @@ __all__ = [
     "IncorrectPasswordError",
     "LocalBackupAgent",
     "ManagerBackup",
     "ManagerStateEvent",
     "NewBackup",
     "RestoreBackupEvent",
     "RestoreBackupStage",
     "RestoreBackupState",
     "WrittenBackup",
     "async_get_manager",
     "suggested_filename",
     "suggested_filename_from_name_date",
 ]
@@ -91,7 +92,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

     backup_manager = BackupManager(hass, reader_writer)
     hass.data[DATA_MANAGER] = backup_manager
-    await backup_manager.async_setup()
+    try:
+        await backup_manager.async_setup()
+    except Exception as err:
+        hass.data[DATA_BACKUP].manager_ready.set_exception(err)
+        raise
+    else:
+        hass.data[DATA_BACKUP].manager_ready.set_result(None)

     async_register_websocket_handlers(hass, with_hassio)

@@ -122,15 +129,3 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     async_register_http_views(hass)

     return True
-
-
-@callback
-def async_get_manager(hass: HomeAssistant) -> BackupManager:
-    """Get the backup manager instance.
-
-    Raises HomeAssistantError if the backup integration is not available.
-    """
-    if DATA_MANAGER not in hass.data:
-        raise HomeAssistantError("Backup integration is not available")
-
-    return hass.data[DATA_MANAGER]

homeassistant/components/backup/basic_websocket.py (new file, 38 lines)
@@ -0,0 +1,38 @@
"""Websocket commands for the Backup integration."""

from typing import Any

import voluptuous as vol

from homeassistant.components import websocket_api
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.backup import async_subscribe_events

from .const import DATA_MANAGER
from .manager import ManagerStateEvent


@callback
def async_register_websocket_handlers(hass: HomeAssistant) -> None:
    """Register websocket commands."""
    websocket_api.async_register_command(hass, handle_subscribe_events)


@websocket_api.require_admin
@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
@websocket_api.async_response
async def handle_subscribe_events(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg: dict[str, Any],
) -> None:
    """Subscribe to backup events."""

    def on_event(event: ManagerStateEvent) -> None:
        connection.send_message(websocket_api.event_message(msg["id"], event))

    if DATA_MANAGER in hass.data:
        manager = hass.data[DATA_MANAGER]
        on_event(manager.last_event)
    connection.subscriptions[msg["id"]] = async_subscribe_events(hass, on_event)
    connection.send_result(msg["id"])
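
For context, the websocket exchange this command enables (illustrative, id values arbitrary; the event payload is the serialized ManagerStateEvent):

    client -> {"id": 5, "type": "backup/subscribe_events"}
    server -> {"id": 5, "type": "event", "event": {...last manager state...}}
    server -> {"id": 5, "type": "result", "success": true, "result": null}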

@@ -12,16 +12,19 @@ from typing import TYPE_CHECKING, Self, TypedDict
 from cronsim import CronSim

 from homeassistant.core import HomeAssistant, callback
+from homeassistant.helpers import issue_registry as ir
 from homeassistant.helpers.event import async_call_later, async_track_point_in_time
 from homeassistant.helpers.typing import UNDEFINED, UndefinedType
 from homeassistant.util import dt as dt_util

-from .const import LOGGER
+from .const import DOMAIN, LOGGER
 from .models import BackupManagerError, Folder

 if TYPE_CHECKING:
     from .manager import BackupManager, ManagerBackup

+AUTOMATIC_BACKUP_AGENTS_UNAVAILABLE_ISSUE_ID = "automatic_backup_agents_unavailable"
+
 CRON_PATTERN_DAILY = "{m} {h} * * *"
 CRON_PATTERN_WEEKLY = "{m} {h} * * {d}"

@@ -39,6 +42,7 @@ class StoredBackupConfig(TypedDict):
     """Represent the stored backup config."""

     agents: dict[str, StoredAgentConfig]
+    automatic_backups_configured: bool
     create_backup: StoredCreateBackupConfig
     last_attempted_automatic_backup: str | None
     last_completed_automatic_backup: str | None
@@ -51,6 +55,7 @@ class BackupConfigData:
     """Represent loaded backup config data."""

     agents: dict[str, AgentConfig]
+    automatic_backups_configured: bool  # only used by frontend
     create_backup: CreateBackupConfig
     last_attempted_automatic_backup: datetime | None = None
     last_completed_automatic_backup: datetime | None = None
@@ -88,6 +93,7 @@ class BackupConfigData:
                 agent_id: AgentConfig(protected=agent_data["protected"])
                 for agent_id, agent_data in data["agents"].items()
             },
+            automatic_backups_configured=data["automatic_backups_configured"],
             create_backup=CreateBackupConfig(
                 agent_ids=data["create_backup"]["agent_ids"],
                 include_addons=data["create_backup"]["include_addons"],
@@ -127,6 +133,7 @@ class BackupConfigData:
             agents={
                 agent_id: agent.to_dict() for agent_id, agent in self.agents.items()
             },
+            automatic_backups_configured=self.automatic_backups_configured,
             create_backup=self.create_backup.to_dict(),
             last_attempted_automatic_backup=last_attempted,
             last_completed_automatic_backup=last_completed,
@@ -142,10 +149,12 @@ class BackupConfig:
         """Initialize backup config."""
         self.data = BackupConfigData(
             agents={},
+            automatic_backups_configured=False,
             create_backup=CreateBackupConfig(),
             retention=RetentionConfig(),
             schedule=BackupSchedule(),
         )
+        self._hass = hass
         self._manager = manager

     def load(self, stored_config: StoredBackupConfig) -> None:
@@ -159,6 +168,7 @@ class BackupConfig:
         self,
         *,
         agents: dict[str, AgentParametersDict] | UndefinedType = UNDEFINED,
+        automatic_backups_configured: bool | UndefinedType = UNDEFINED,
         create_backup: CreateBackupParametersDict | UndefinedType = UNDEFINED,
         retention: RetentionParametersDict | UndefinedType = UNDEFINED,
         schedule: ScheduleParametersDict | UndefinedType = UNDEFINED,
@@ -172,8 +182,12 @@ class BackupConfig:
             self.data.agents[agent_id] = replace(
                 self.data.agents[agent_id], **agent_config
             )
+        if automatic_backups_configured is not UNDEFINED:
+            self.data.automatic_backups_configured = automatic_backups_configured
         if create_backup is not UNDEFINED:
             self.data.create_backup = replace(self.data.create_backup, **create_backup)
+            if "agent_ids" in create_backup:
+                check_unavailable_agents(self._hass, self._manager)
         if retention is not UNDEFINED:
             new_retention = RetentionConfig(**retention)
             if new_retention != self.data.retention:
@@ -554,3 +568,46 @@ async def delete_backups_exceeding_configured_count(manager: BackupManager) -> None:
     await manager.async_delete_filtered_backups(
         include_filter=_automatic_backups_filter, delete_filter=_delete_filter
     )
+
+
+@callback
+def check_unavailable_agents(hass: HomeAssistant, manager: BackupManager) -> None:
+    """Check for unavailable agents."""
+    if missing_agent_ids := set(manager.config.data.create_backup.agent_ids) - set(
+        manager.backup_agents
+    ):
+        LOGGER.debug(
+            "Agents %s are configured for automatic backup but are unavailable",
+            missing_agent_ids,
+        )
+
+    # Remove issues for unavailable agents that are not unavailable anymore.
+    issue_registry = ir.async_get(hass)
+    existing_missing_agent_issue_ids = {
+        issue_id
+        for domain, issue_id in issue_registry.issues
+        if domain == DOMAIN
+        and issue_id.startswith(AUTOMATIC_BACKUP_AGENTS_UNAVAILABLE_ISSUE_ID)
+    }
+    current_missing_agent_issue_ids = {
+        f"{AUTOMATIC_BACKUP_AGENTS_UNAVAILABLE_ISSUE_ID}_{agent_id}": agent_id
+        for agent_id in missing_agent_ids
+    }
+    for issue_id in existing_missing_agent_issue_ids - set(
+        current_missing_agent_issue_ids
+    ):
+        ir.async_delete_issue(hass, DOMAIN, issue_id)
+    for issue_id, agent_id in current_missing_agent_issue_ids.items():
+        ir.async_create_issue(
+            hass,
+            DOMAIN,
+            issue_id,
+            is_fixable=False,
+            learn_more_url="homeassistant://config/backup",
+            severity=ir.IssueSeverity.WARNING,
+            translation_key="automatic_backup_agents_unavailable",
+            translation_placeholders={
+                "agent_id": agent_id,
+                "backup_settings": "/config/backup/settings",
+            },
+        )
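
For reference, the issue ids created above follow the pattern automatic_backup_agents_unavailable_<agent_id> (for example, automatic_backup_agents_unavailable_backup.local for a hypothetical agent backup.local), which is what the startswith() cleanup pass keys on.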

@@ -14,6 +14,7 @@ from itertools import chain
 import json
 from pathlib import Path, PurePath
 import shutil
+import sys
 import tarfile
 import time
 from typing import IO, TYPE_CHECKING, Any, Protocol, TypedDict, cast
@@ -32,7 +33,9 @@ from homeassistant.helpers import (
     instance_id,
     integration_platform,
     issue_registry as ir,
+    start,
 )
+from homeassistant.helpers.backup import DATA_BACKUP
 from homeassistant.helpers.json import json_bytes
 from homeassistant.util import dt as dt_util, json as json_util

@@ -46,6 +49,7 @@ from .agent import (
 from .config import (
     BackupConfig,
     CreateBackupParametersDict,
+    check_unavailable_agents,
     delete_backups_exceeding_configured_count,
 )
 from .const import (
@@ -305,6 +309,12 @@ class DecryptOnDowloadNotSupported(BackupManagerError):
     _message = "On-the-fly decryption is not supported for this backup."


+class BackupManagerExceptionGroup(BackupManagerError, ExceptionGroup):
+    """Raised when multiple exceptions occur."""
+
+    error_code = "multiple_errors"
+
+
 class BackupManager:
     """Define the format that backup managers can have."""

@@ -332,7 +342,9 @@ class BackupManager:
         # Latest backup event and backup event subscribers
         self.last_event: ManagerStateEvent = IdleEvent()
         self.last_non_idle_event: ManagerStateEvent | None = None
-        self._backup_event_subscriptions: list[Callable[[ManagerStateEvent], None]] = []
+        self._backup_event_subscriptions = hass.data[
+            DATA_BACKUP
+        ].backup_event_subscriptions

     async def async_setup(self) -> None:
         """Set up the backup manager."""
@@ -414,6 +426,13 @@ class BackupManager:
             }
         )

+        @callback
+        def check_unavailable_agents_after_start(hass: HomeAssistant) -> None:
+            """Check unavailable agents after start."""
+            check_unavailable_agents(hass, self)
+
+        start.async_at_started(self.hass, check_unavailable_agents_after_start)
+
     async def _add_platform(
         self,
         hass: HomeAssistant,
@@ -1279,19 +1298,6 @@ class BackupManager:
         for subscription in self._backup_event_subscriptions:
             subscription(event)

-    @callback
-    def async_subscribe_events(
-        self,
-        on_event: Callable[[ManagerStateEvent], None],
-    ) -> Callable[[], None]:
-        """Subscribe events."""
-
-        def remove_subscription() -> None:
-            self._backup_event_subscriptions.remove(on_event)
-
-        self._backup_event_subscriptions.append(on_event)
-        return remove_subscription
-
     def _update_issue_backup_failed(self) -> None:
         """Update issue registry when a backup fails."""
         ir.async_create_issue(
@@ -1606,10 +1612,24 @@ class CoreBackupReaderWriter(BackupReaderWriter):
             )
         finally:
             # Inform integrations the backup is done
-            try:
-                await manager.async_post_backup_actions()
-            except BackupManagerError as err:
-                raise BackupReaderWriterError(str(err)) from err
+            # If there's an unhandled exception, we keep it so we can rethrow it in case
+            # the post backup actions also fail.
+            unhandled_exc = sys.exception()
+            try:
+                await manager.async_post_backup_actions()
+            except BackupManagerError as err:
+                raise BackupReaderWriterError(str(err)) from err
+            except Exception as err:
+                if not unhandled_exc:
+                    raise
+                # If there's an unhandled exception, we wrap both that and the exception
+                # from the post backup actions in an ExceptionGroup so the caller is
+                # aware of both exceptions.
+                raise BackupManagerExceptionGroup(
+                    f"Multiple errors when creating backup: {unhandled_exc}, {err}",
+                    [unhandled_exc, err],
+                ) from None
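
For context, a minimal standalone sketch of the wrap-both-errors pattern used above (illustrative, not part of this diff; requires Python 3.12 for sys.exception()):

    import sys

    def post_actions() -> None:
        raise OSError("post backup action failed")  # hypothetical cleanup failure

    try:
        try:
            raise ValueError("backup failed")  # hypothetical in-flight error
        finally:
            unhandled_exc = sys.exception()
            try:
                post_actions()
            except Exception as err:
                if not unhandled_exc:
                    raise
                # Group both errors so the caller sees the original and the cleanup failure
                raise ExceptionGroup(
                    f"Multiple errors: {unhandled_exc}, {err}", [unhandled_exc, err]
                ) from None
    except* (ValueError, OSError) as eg:
        print(eg.exceptions)  # both errors are visible to the caller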

     def _mkdir_and_generate_backup_contents(
         self,
@@ -1621,7 +1641,13 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         """Generate backup contents and return the size."""
         if not tar_file_path:
             tar_file_path = self.temp_backup_dir / f"{backup_data['slug']}.tar"
-        make_backup_dir(tar_file_path.parent)
+        try:
+            make_backup_dir(tar_file_path.parent)
+        except OSError as err:
+            raise BackupReaderWriterError(
+                f"Failed to create dir {tar_file_path.parent}: "
+                f"{err} ({err.__class__.__name__})"
+            ) from err

         excludes = EXCLUDE_FROM_BACKUP
         if not database_included:
@@ -1659,7 +1685,14 @@ class CoreBackupReaderWriter(BackupReaderWriter):
             file_filter=is_excluded_by_filter,
             arcname="data",
         )
-        return (tar_file_path, tar_file_path.stat().st_size)
+        try:
+            stat_result = tar_file_path.stat()
+        except OSError as err:
+            raise BackupReaderWriterError(
+                f"Error getting size of {tar_file_path}: "
+                f"{err} ({err.__class__.__name__})"
+            ) from err
+        return (tar_file_path, stat_result.st_size)

     async def async_receive_backup(
         self,

@@ -8,5 +8,5 @@
     "integration_type": "system",
     "iot_class": "calculated",
     "quality_scale": "internal",
-    "requirements": ["cronsim==2.6", "securetar==2025.1.4"]
+    "requirements": ["cronsim==2.6", "securetar==2025.2.1"]
 }

@@ -16,7 +16,7 @@ if TYPE_CHECKING:
 STORE_DELAY_SAVE = 30
 STORAGE_KEY = DOMAIN
 STORAGE_VERSION = 1
-STORAGE_VERSION_MINOR = 4
+STORAGE_VERSION_MINOR = 5


 class StoredBackupData(TypedDict):
@@ -67,6 +67,11 @@ class _BackupStore(Store[StoredBackupData]):
             data["config"]["retention"]["copies"] = None
             if data["config"]["retention"]["days"] == 0:
                 data["config"]["retention"]["days"] = None
+        if old_minor_version < 5:
+            # Version 1.5 adds automatic_backups_configured
+            data["config"]["automatic_backups_configured"] = (
+                data["config"]["create_backup"]["password"] is not None
+            )

         # Note: We allow reading data with major version 2.
         # Reject if major version is higher than 2.
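
A quick illustration of the 1.5 migration above (values hypothetical): a store holding {"config": {"create_backup": {"password": "hunter2"}}} migrates to automatic_backups_configured = True, while a null password yields False, so upgraded installs keep their effective automatic-backup behavior.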

@@ -1,5 +1,9 @@
 {
   "issues": {
+    "automatic_backup_agents_unavailable": {
+      "title": "The backup location {agent_id} is unavailable",
+      "description": "The backup location `{agent_id}` is unavailable but is still configured for automatic backups.\n\nPlease visit the [automatic backup configuration page]({backup_settings}) to review and update your backup locations. Backups will not be uploaded to selected locations that are unavailable."
+    },
     "automatic_backup_failed_create": {
       "title": "Automatic backup could not be created",
       "description": "The automatic backup could not be created. Please check the logs for more information. Another attempt will be made at the next scheduled time if a backup schedule is configured."

@@ -10,11 +10,7 @@ from homeassistant.helpers import config_validation as cv

 from .config import Day, ScheduleRecurrence
 from .const import DATA_MANAGER, LOGGER
-from .manager import (
-    DecryptOnDowloadNotSupported,
-    IncorrectPasswordError,
-    ManagerStateEvent,
-)
+from .manager import DecryptOnDowloadNotSupported, IncorrectPasswordError
 from .models import BackupNotFound, Folder


@@ -34,7 +30,6 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) -> None:
     websocket_api.async_register_command(hass, handle_create_with_automatic_settings)
     websocket_api.async_register_command(hass, handle_delete)
     websocket_api.async_register_command(hass, handle_restore)
-    websocket_api.async_register_command(hass, handle_subscribe_events)

     websocket_api.async_register_command(hass, handle_config_info)
     websocket_api.async_register_command(hass, handle_config_update)
@@ -352,6 +347,7 @@ async def handle_config_info(
     {
         vol.Required("type"): "backup/config/update",
         vol.Optional("agents"): vol.Schema({str: {"protected": bool}}),
+        vol.Optional("automatic_backups_configured"): bool,
         vol.Optional("create_backup"): vol.Schema(
             {
                 vol.Optional("agent_ids"): vol.All([str], vol.Unique()),
@@ -400,22 +396,3 @@ def handle_config_update(
     changes.pop("type")
     manager.config.update(**changes)
     connection.send_result(msg["id"])
-
-
-@websocket_api.require_admin
-@websocket_api.websocket_command({vol.Required("type"): "backup/subscribe_events"})
-@websocket_api.async_response
-async def handle_subscribe_events(
-    hass: HomeAssistant,
-    connection: websocket_api.ActiveConnection,
-    msg: dict[str, Any],
-) -> None:
-    """Subscribe to backup events."""
-
-    def on_event(event: ManagerStateEvent) -> None:
-        connection.send_message(websocket_api.event_message(msg["id"], event))
-
-    manager = hass.data[DATA_MANAGER]
-    on_event(manager.last_event)
-    connection.subscriptions[msg["id"]] = manager.async_subscribe_events(on_event)
-    connection.send_result(msg["id"])

@@ -21,6 +21,7 @@ _LOGGER = logging.getLogger(__name__)
 PLATFORMS = [
     Platform.BINARY_SENSOR,
     Platform.CLIMATE,
+    Platform.EVENT,
     Platform.FAN,
     Platform.LIGHT,
     Platform.SELECT,
@@ -28,7 +29,6 @@ PLATFORMS = [
     Platform.TIME,
 ]

-
 KEEP_ALIVE_INTERVAL = timedelta(minutes=1)
 SYNC_TIME_INTERVAL = timedelta(hours=1)

homeassistant/components/balboa/event.py (new file, 91 lines)
@@ -0,0 +1,91 @@
"""Support for Balboa events."""

from __future__ import annotations

from datetime import datetime, timedelta

from pybalboa import EVENT_UPDATE, SpaClient

from homeassistant.components.event import EventEntity
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.event import async_track_time_interval

from . import BalboaConfigEntry
from .entity import BalboaEntity

FAULT = "fault"
FAULT_DATE = "fault_date"
REQUEST_FAULT_LOG_INTERVAL = timedelta(minutes=5)

FAULT_MESSAGE_CODE_MAP: dict[int, str] = {
    15: "sensor_out_of_sync",
    16: "low_flow",
    17: "flow_failed",
    18: "settings_reset",
    19: "priming_mode",
    20: "clock_failed",
    21: "settings_reset",
    22: "memory_failure",
    26: "service_sensor_sync",
    27: "heater_dry",
    28: "heater_may_be_dry",
    29: "water_too_hot",
    30: "heater_too_hot",
    31: "sensor_a_fault",
    32: "sensor_b_fault",
    34: "pump_stuck",
    35: "hot_fault",
    36: "gfci_test_failed",
    37: "standby_mode",
}
FAULT_EVENT_TYPES = sorted(set(FAULT_MESSAGE_CODE_MAP.values()))


async def async_setup_entry(
    hass: HomeAssistant,
    entry: BalboaConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the spa's events."""
    async_add_entities([BalboaEventEntity(entry.runtime_data)])


class BalboaEventEntity(BalboaEntity, EventEntity):
    """Representation of a Balboa event entity."""

    _attr_event_types = FAULT_EVENT_TYPES
    _attr_translation_key = FAULT

    def __init__(self, spa: SpaClient) -> None:
        """Initialize a Balboa event entity."""
        super().__init__(spa, FAULT)

    @callback
    def _async_handle_event(self) -> None:
        """Handle the fault event."""
        if not (fault := self._client.fault):
            return
        fault_date = fault.fault_datetime.isoformat()
        if self.state_attributes.get(FAULT_DATE) != fault_date:
            self._trigger_event(
                FAULT_MESSAGE_CODE_MAP.get(fault.message_code, fault.message),
                {FAULT_DATE: fault_date, "code": fault.message_code},
            )
        self.async_write_ha_state()

    async def async_added_to_hass(self) -> None:
        """Run when entity about to be added to hass."""
        await super().async_added_to_hass()
        self.async_on_remove(self._client.on(EVENT_UPDATE, self._async_handle_event))

        async def request_fault_log(now: datetime | None = None) -> None:
            """Request the most recent fault log."""
            await self._client.request_fault_log()

        await request_fault_log()
        self.async_on_remove(
            async_track_time_interval(
                self.hass, request_fault_log, REQUEST_FAULT_LOG_INTERVAL
            )
        )
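
Design note on the entity above: the fault log is requested once when the entity is added and then every REQUEST_FAULT_LOG_INTERVAL (5 minutes), and _async_handle_event only fires a new event when the fault_date attribute changes, so repeated polls of an unchanged fault do not re-trigger it.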

@@ -57,6 +57,35 @@
         }
       }
     },
+    "event": {
+      "fault": {
+        "name": "Fault",
+        "state_attributes": {
+          "event_type": {
+            "state": {
+              "sensor_out_of_sync": "Sensors are out of sync",
+              "low_flow": "The water flow is low",
+              "flow_failed": "The water flow has failed",
+              "settings_reset": "The settings have been reset",
+              "priming_mode": "Priming mode",
+              "clock_failed": "The clock has failed",
+              "memory_failure": "Program memory failure",
+              "service_sensor_sync": "Sensors are out of sync -- call for service",
+              "heater_dry": "The heater is dry",
+              "heater_may_be_dry": "The heater may be dry",
+              "water_too_hot": "The water is too hot",
+              "heater_too_hot": "The heater is too hot",
+              "sensor_a_fault": "Sensor A fault",
+              "sensor_b_fault": "Sensor B fault",
+              "pump_stuck": "A pump may be stuck on",
+              "hot_fault": "Hot fault",
+              "gfci_test_failed": "The GFCI test failed",
+              "standby_mode": "Standby mode (hold mode)"
+            }
+          }
+        }
+      }
+    },
     "fan": {
       "pump": {
         "name": "Pump {index}"

@@ -28,7 +28,7 @@
       "name": "Activity",
       "state": {
         "available": "Available",
-        "charging": "Charging",
+        "charging": "[%key:common::state::charging%]",
         "unavailable": "Unavailable",
         "error": "Error",
         "offline": "Offline"

@@ -311,11 +324,24 @@ async def async_update_device(
     update the device with the new location so they can
     figure out where the adapter is.
     """
+    address = details[ADAPTER_ADDRESS]
+    connections = {(dr.CONNECTION_BLUETOOTH, address)}
     device_registry = dr.async_get(hass)
+    # We only have one device for the config entry
+    # so if the address has been corrected, make
+    # sure the device entry reflects the correct
+    # address
+    for device in dr.async_entries_for_config_entry(device_registry, entry.entry_id):
+        for conn_type, conn_value in device.connections:
+            if conn_type == dr.CONNECTION_BLUETOOTH and conn_value != address:
+                device_registry.async_update_device(
+                    device.id, new_connections=connections
+                )
+                break
     device_entry = device_registry.async_get_or_create(
         config_entry_id=entry.entry_id,
-        name=adapter_human_name(adapter, details[ADAPTER_ADDRESS]),
-        connections={(dr.CONNECTION_BLUETOOTH, details[ADAPTER_ADDRESS])},
+        name=adapter_human_name(adapter, address),
+        connections=connections,
         manufacturer=details[ADAPTER_MANUFACTURER],
         model=adapter_model(details),
         sw_version=details.get(ADAPTER_SW_VERSION),
@@ -342,9 +355,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             )
         )
     )
     return True
     address = entry.unique_id
     assert address is not None
     assert source_entry is not None
     source_domain = entry.data[CONF_SOURCE_DOMAIN]
     if mac_manufacturer := await get_manufacturer_from_mac(address):
         manufacturer = f"{mac_manufacturer} ({source_domain})"

@@ -186,16 +186,28 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
         """Handle a flow initialized by an external scanner."""
         source = user_input[CONF_SOURCE]
         await self.async_set_unique_id(source)
+        source_config_entry_id = user_input[CONF_SOURCE_CONFIG_ENTRY_ID]
         data = {
             CONF_SOURCE: source,
             CONF_SOURCE_MODEL: user_input[CONF_SOURCE_MODEL],
             CONF_SOURCE_DOMAIN: user_input[CONF_SOURCE_DOMAIN],
-            CONF_SOURCE_CONFIG_ENTRY_ID: user_input[CONF_SOURCE_CONFIG_ENTRY_ID],
+            CONF_SOURCE_CONFIG_ENTRY_ID: source_config_entry_id,
             CONF_SOURCE_DEVICE_ID: user_input[CONF_SOURCE_DEVICE_ID],
         }
         self._abort_if_unique_id_configured(updates=data)
-        manager = get_manager()
-        scanner = manager.async_scanner_by_source(source)
+        for entry in self._async_current_entries(include_ignore=False):
+            # If the mac address needs to be corrected, migrate
+            # the config entry to the new mac address
+            if (
+                entry.data.get(CONF_SOURCE_CONFIG_ENTRY_ID) == source_config_entry_id
+                and entry.unique_id != source
+            ):
+                self.hass.config_entries.async_update_entry(
+                    entry, unique_id=source, data={**entry.data, **data}
+                )
+                self.hass.config_entries.async_schedule_reload(entry.entry_id)
+                return self.async_abort(reason="already_configured")
+        scanner = get_manager().async_scanner_by_source(source)
         assert scanner is not None
         return self.async_create_entry(title=scanner.name, data=data)
|
||||
|
||||
|
||||
@@ -21,6 +21,6 @@
|
||||
"bluetooth-auto-recovery==1.4.4",
|
||||
"bluetooth-data-tools==1.23.4",
|
||||
"dbus-fast==2.33.0",
|
||||
"habluetooth==3.24.0"
|
||||
"habluetooth==3.24.1"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -138,7 +138,7 @@
|
||||
"name": "Charging status",
|
||||
"state": {
|
||||
"default": "Default",
|
||||
"charging": "Charging",
|
||||
"charging": "[%key:common::state::charging%]",
|
||||
"error": "Error",
|
||||
"complete": "Complete",
|
||||
"fully_charged": "Fully charged",
|
||||
|
||||
@@ -0,0 +1 @@
"""Virtual integration: Burbank Water and Power (BWP)."""

@@ -0,0 +1,6 @@
{
  "domain": "burbank_water_and_power",
  "name": "Burbank Water and Power (BWP)",
  "integration_type": "virtual",
  "supported_by": "opower"
}
@@ -138,6 +138,8 @@ class WebDavTodoListEntity(TodoListEntity):
            await self.hass.async_add_executor_job(
                partial(self._calendar.save_todo, **item_data),
            )
+           # refreshing async otherwise it would take too much time
+           self.hass.async_create_task(self.async_update_ha_state(force_refresh=True))
        except (requests.ConnectionError, DAVError) as err:
            raise HomeAssistantError(f"CalDAV save error: {err}") from err

@@ -172,6 +174,8 @@ class WebDavTodoListEntity(TodoListEntity):
                    obj_type="todo",
                ),
            )
+           # refreshing async otherwise it would take too much time
+           self.hass.async_create_task(self.async_update_ha_state(force_refresh=True))
        except (requests.ConnectionError, DAVError) as err:
            raise HomeAssistantError(f"CalDAV save error: {err}") from err

@@ -195,3 +199,5 @@ class WebDavTodoListEntity(TodoListEntity):
            await self.hass.async_add_executor_job(item.delete)
        except (requests.ConnectionError, DAVError) as err:
            raise HomeAssistantError(f"CalDAV delete error: {err}") from err
+       # refreshing async otherwise it would take too much time
+       self.hass.async_create_task(self.async_update_ha_state(force_refresh=True))

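The recurring "# refreshing async otherwise it would take too much time" comment describes the pattern in all three hunks: run the blocking caldav call in an executor, then kick off the state refresh as a background task instead of awaiting it, so the service call returns quickly. A generic asyncio sketch of the idea with stand-in functions (not the caldav API):

import asyncio
import time


def save_todo_blocking(summary: str) -> None:
    """Stand-in for the blocking DAV save call."""
    time.sleep(0.2)  # simulated network round-trip


async def refresh_state() -> None:
    """Stand-in for the force-refresh of the entity state."""
    await asyncio.sleep(0.1)


async def add_item(summary: str) -> None:
    loop = asyncio.get_running_loop()
    # Off-load the blocking save so the event loop stays responsive.
    await loop.run_in_executor(None, save_todo_blocking, summary)
    # Fire-and-forget refresh: the caller does not wait for it. Real code
    # should keep a reference to the task (HA's async_create_task does).
    asyncio.create_task(refresh_state())


async def main() -> None:
    await add_item("water the plants")
    await asyncio.sleep(0.2)  # give the background refresh time to finish


asyncio.run(main())
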
@@ -8,6 +8,6 @@
  "iot_class": "local_push",
  "loggers": ["aiostreammagic"],
  "quality_scale": "platinum",
- "requirements": ["aiostreammagic==2.10.0"],
+ "requirements": ["aiostreammagic==2.11.0"],
  "zeroconf": ["_stream-magic._tcp.local.", "_smoip._tcp.local."]
}

@@ -68,7 +68,6 @@ from .const import (  # noqa: F401
    FAN_ON,
    FAN_TOP,
    HVAC_MODES,
    INTENT_GET_TEMPERATURE,
    INTENT_SET_TEMPERATURE,
    PRESET_ACTIVITY,
    PRESET_AWAY,

@@ -126,7 +126,6 @@ DEFAULT_MAX_HUMIDITY = 99

DOMAIN = "climate"

INTENT_GET_TEMPERATURE = "HassClimateGetTemperature"
INTENT_SET_TEMPERATURE = "HassClimateSetTemperature"

SERVICE_SET_AUX_HEAT = "set_aux_heat"

@@ -1,4 +1,4 @@
-"""Intents for the client integration."""
+"""Intents for the climate integration."""

from __future__ import annotations

@@ -11,7 +11,6 @@ from homeassistant.helpers import config_validation as cv, intent
from . import (
    ATTR_TEMPERATURE,
    DOMAIN,
    INTENT_GET_TEMPERATURE,
    INTENT_SET_TEMPERATURE,
    SERVICE_SET_TEMPERATURE,
    ClimateEntityFeature,
@@ -20,49 +19,9 @@ from . import (

async def async_setup_intents(hass: HomeAssistant) -> None:
    """Set up the climate intents."""
    intent.async_register(hass, GetTemperatureIntent())
    intent.async_register(hass, SetTemperatureIntent())


class GetTemperatureIntent(intent.IntentHandler):
    """Handle GetTemperature intents."""

    intent_type = INTENT_GET_TEMPERATURE
    description = "Gets the current temperature of a climate device or entity"
    slot_schema = {
        vol.Optional("area"): intent.non_empty_string,
        vol.Optional("name"): intent.non_empty_string,
    }
    platforms = {DOMAIN}

    async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
        """Handle the intent."""
        hass = intent_obj.hass
        slots = self.async_validate_slots(intent_obj.slots)

        name: str | None = None
        if "name" in slots:
            name = slots["name"]["value"]

        area: str | None = None
        if "area" in slots:
            area = slots["area"]["value"]

        match_constraints = intent.MatchTargetsConstraints(
            name=name, area_name=area, domains=[DOMAIN], assistant=intent_obj.assistant
        )
        match_result = intent.async_match_targets(hass, match_constraints)
        if not match_result.is_match:
            raise intent.MatchFailedError(
                result=match_result, constraints=match_constraints
            )

        response = intent_obj.create_response()
        response.response_type = intent.IntentResponseType.QUERY_ANSWER
        response.async_set_states(matched_states=match_result.states)
        return response


class SetTemperatureIntent(intent.IntentHandler):
    """Handle SetTemperature intents."""

@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["aiocomelit"],
- "requirements": ["aiocomelit==0.10.1"]
+ "requirements": ["aiocomelit==0.11.1"]
}

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "system",
  "quality_scale": "internal",
- "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.5"]
+ "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.26"]
}

@@ -62,12 +62,14 @@ class ConversationResult:

    response: intent.IntentResponse
    conversation_id: str | None = None
+   continue_conversation: bool = False

    def as_dict(self) -> dict[str, Any]:
        """Return result as a dict."""
        return {
            "response": self.response.as_dict(),
            "conversation_id": self.conversation_id,
+           "continue_conversation": self.continue_conversation,
        }

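The change itself is easy to model outside Home Assistant: a dataclass gains a defaulted continue_conversation field and as_dict picks it up. A small stand-alone sketch (Result stands in for ConversationResult; the real response field is an IntentResponse, not a str):

from dataclasses import dataclass
from typing import Any


@dataclass
class Result:
    response: str
    conversation_id: str | None = None
    continue_conversation: bool = False

    def as_dict(self) -> dict[str, Any]:
        return {
            "response": self.response,
            "conversation_id": self.conversation_id,
            "continue_conversation": self.continue_conversation,
        }


print(Result("done").as_dict())
# {'response': 'done', 'conversation_id': None, 'continue_conversation': False}
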
@@ -48,6 +48,7 @@ COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM = [
    Platform.TIME,
    Platform.UPDATE,
    Platform.VACUUM,
+   Platform.VALVE,
    Platform.WATER_HEATER,
    Platform.WEATHER,
]

89  homeassistant/components/demo/valve.py  Normal file
@@ -0,0 +1,89 @@
"""Demo valve platform that implements valves."""

from __future__ import annotations

import asyncio
from typing import Any

from homeassistant.components.valve import ValveEntity, ValveEntityFeature, ValveState
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

OPEN_CLOSE_DELAY = 2  # Used to give a realistic open/close experience in frontend


async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Demo config entry."""
    async_add_entities(
        [
            DemoValve("Front Garden", ValveState.OPEN),
            DemoValve("Orchard", ValveState.CLOSED),
        ]
    )


class DemoValve(ValveEntity):
    """Representation of a Demo valve."""

    _attr_should_poll = False

    def __init__(
        self,
        name: str,
        state: str,
        moveable: bool = True,
    ) -> None:
        """Initialize the valve."""
        self._attr_name = name
        if moveable:
            self._attr_supported_features = (
                ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE
            )
        self._state = state
        self._moveable = moveable

    @property
    def is_open(self) -> bool:
        """Return true if valve is open."""
        return self._state == ValveState.OPEN

    @property
    def is_opening(self) -> bool:
        """Return true if valve is opening."""
        return self._state == ValveState.OPENING

    @property
    def is_closing(self) -> bool:
        """Return true if valve is closing."""
        return self._state == ValveState.CLOSING

    @property
    def is_closed(self) -> bool:
        """Return true if valve is closed."""
        return self._state == ValveState.CLOSED

    @property
    def reports_position(self) -> bool:
        """Return True if entity reports position, False otherwise."""
        return False

    async def async_open_valve(self, **kwargs: Any) -> None:
        """Open the valve."""
        self._state = ValveState.OPENING
        self.async_write_ha_state()
        await asyncio.sleep(OPEN_CLOSE_DELAY)
        self._state = ValveState.OPEN
        self.async_write_ha_state()

    async def async_close_valve(self, **kwargs: Any) -> None:
        """Close the valve."""
        self._state = ValveState.CLOSING
        self.async_write_ha_state()
        await asyncio.sleep(OPEN_CLOSE_DELAY)
        self._state = ValveState.CLOSED
        self.async_write_ha_state()
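DemoValve writes its state twice per command: once before the sleep, so the frontend immediately shows "opening"/"closing", and once after, when the transition is done. A stand-alone asyncio sketch of the same two-step state machine (ValveSim is illustrative, not part of the integration):

import asyncio

OPEN_CLOSE_DELAY = 2  # seconds, as in the platform above


class ValveSim:
    def __init__(self) -> None:
        self.state = "closed"

    async def open(self) -> None:
        self.state = "opening"  # published immediately for the UI
        await asyncio.sleep(OPEN_CLOSE_DELAY)
        self.state = "open"     # final state once the transition is done


async def main() -> None:
    valve = ValveSim()
    task = asyncio.create_task(valve.open())
    await asyncio.sleep(0)  # let the task start running
    print(valve.state)      # -> opening
    await task
    print(valve.state)      # -> open


asyncio.run(main())
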
@@ -24,7 +24,14 @@ from homeassistant.const import (
    STATE_UNKNOWN,
    UnitOfTime,
)
-from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback
+from homeassistant.core import (
+   Event,
+   EventStateChangedData,
+   EventStateReportedData,
+   HomeAssistant,
+   State,
+   callback,
+)
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.device import async_device_info_to_link_from_entity
from homeassistant.helpers.device_registry import DeviceInfo
@@ -32,7 +39,10 @@ from homeassistant.helpers.entity_platform import (
    AddConfigEntryEntitiesCallback,
    AddEntitiesCallback,
)
-from homeassistant.helpers.event import async_track_state_change_event
+from homeassistant.helpers.event import (
+   async_track_state_change_event,
+   async_track_state_report_event,
+)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import (
@@ -200,13 +210,33 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
            _LOGGER.warning("Could not restore last state: %s", err)

        @callback
-       def calc_derivative(event: Event[EventStateChangedData]) -> None:
+       def on_state_reported(event: Event[EventStateReportedData]) -> None:
            """Handle constant sensor state."""
            if self._attr_native_value == Decimal(0):
                # If the derivative is zero, and the source sensor hasn't
                # changed state, then we know it will still be zero.
                return
            new_state = event.data["new_state"]
            if new_state is not None:
                calc_derivative(
                    new_state, new_state.state, event.data["old_last_reported"]
                )

        @callback
        def on_state_changed(event: Event[EventStateChangedData]) -> None:
            """Handle changed sensor state."""
            new_state = event.data["new_state"]
            old_state = event.data["old_state"]
            if new_state is not None and old_state is not None:
                calc_derivative(new_state, old_state.state, old_state.last_reported)

        def calc_derivative(
            new_state: State, old_value: str, old_last_reported: datetime
        ) -> None:
            """Handle the sensor state changes."""
-           if (
-               (old_state := event.data["old_state"]) is None
-               or old_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE)
-               or (new_state := event.data["new_state"]) is None
-               or new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE)
+           if old_value in (STATE_UNKNOWN, STATE_UNAVAILABLE) or new_state.state in (
+               STATE_UNKNOWN,
+               STATE_UNAVAILABLE,
            ):
                return

@@ -220,15 +250,15 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
            self._state_list = [
                (time_start, time_end, state)
                for time_start, time_end, state in self._state_list
-               if (new_state.last_updated - time_end).total_seconds()
+               if (new_state.last_reported - time_end).total_seconds()
                < self._time_window
            ]

            try:
                elapsed_time = (
-                   new_state.last_updated - old_state.last_updated
+                   new_state.last_reported - old_last_reported
                ).total_seconds()
-               delta_value = Decimal(new_state.state) - Decimal(old_state.state)
+               delta_value = Decimal(new_state.state) - Decimal(old_value)
                new_derivative = (
                    delta_value
                    / Decimal(elapsed_time)
@@ -240,7 +270,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
                _LOGGER.warning("While calculating derivative: %s", err)
            except DecimalException as err:
                _LOGGER.warning(
-                   "Invalid state (%s > %s): %s", old_state.state, new_state.state, err
+                   "Invalid state (%s > %s): %s", old_value, new_state.state, err
                )
            except AssertionError as err:
                _LOGGER.error("Could not calculate derivative: %s", err)
@@ -257,7 +287,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):

            # add latest derivative to the window list
            self._state_list.append(
-               (old_state.last_updated, new_state.last_updated, new_derivative)
+               (old_last_reported, new_state.last_reported, new_derivative)
            )

        def calculate_weight(
@@ -277,13 +307,19 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
            else:
                derivative = Decimal("0.00")
                for start, end, value in self._state_list:
-                   weight = calculate_weight(start, end, new_state.last_updated)
+                   weight = calculate_weight(start, end, new_state.last_reported)
                    derivative = derivative + (value * Decimal(weight))
            self._attr_native_value = round(derivative, self._round_digits)
            self.async_write_ha_state()

        self.async_on_remove(
            async_track_state_change_event(
-               self.hass, self._sensor_source_id, calc_derivative
+               self.hass, self._sensor_source_id, on_state_changed
            )
        )

        self.async_on_remove(
            async_track_state_report_event(
                self.hass, self._sensor_source_id, on_state_reported
            )
        )

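The rewritten sensor keys its bookkeeping to last_reported instead of last_updated, so unchanged-but-reported states extend the window, and the published value is a time-weighted average of the per-interval derivatives kept in self._state_list. A simplified stand-alone model of that weighting (it omits the clipping of intervals that start before the look-back window):

from datetime import datetime, timedelta
from decimal import Decimal

TIME_WINDOW = Decimal(600)  # look-back window in seconds


def weighted_derivative(window: list[tuple[datetime, datetime, Decimal]]) -> Decimal:
    """Weight each interval's derivative by its share of the window."""
    total = Decimal(0)
    for start, end, value in window:
        weight = Decimal((end - start).total_seconds()) / TIME_WINDOW
        total += value * weight
    return total


t0 = datetime(2025, 1, 1)
window = [
    (t0, t0 + timedelta(seconds=300), Decimal(2)),  # 2 units/s for 300 s
    (t0 + timedelta(seconds=300), t0 + timedelta(seconds=600), Decimal(0)),
]
print(weighted_derivative(window))  # 1.0: half the window at 2, half at 0
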
@@ -8,6 +8,7 @@ from devolo_plc_api.device_api import (
    WifiGuestAccessGet,
)
from devolo_plc_api.plcnet_api import DataRate, LogicalNetwork
+from yarl import URL

from homeassistant.const import ATTR_CONNECTIONS
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
@@ -43,7 +44,7 @@ class DevoloEntity(Entity):
        self.entry = entry

        self._attr_device_info = DeviceInfo(
-           configuration_url=f"http://{self.device.ip}",
+           configuration_url=URL.build(scheme="http", host=self.device.ip),
            identifiers={(DOMAIN, str(self.device.serial_number))},
            manufacturer="devolo",
            model=self.device.product,

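Building the configuration URL with yarl's URL.build instead of an f-string delegates quoting and validation of the host to the library. A small illustration, assuming yarl is installed (the IPv6 bracketing shown reflects yarl's documented behavior):

from yarl import URL

host = "2001:db8::1"  # IPv6 literal
print(f"http://{host}")                     # http://2001:db8::1 (not a valid URL)
print(URL.build(scheme="http", host=host))  # http://[2001:db8::1]
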
@@ -16,6 +16,6 @@
  "requirements": [
    "aiodhcpwatcher==1.1.1",
    "aiodiscover==2.6.1",
-   "cached-ipaddress==0.8.1"
+   "cached-ipaddress==0.9.2"
  ]
}

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/ecovacs",
  "iot_class": "cloud_push",
  "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
- "requirements": ["py-sucks==0.9.10", "deebot-client==12.2.0"]
+ "requirements": ["py-sucks==0.9.10", "deebot-client==12.3.1"]
}

@@ -105,6 +105,7 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity):
        AlarmControlPanelEntityFeature.ARM_HOME
        | AlarmControlPanelEntityFeature.ARM_AWAY
        | AlarmControlPanelEntityFeature.ARM_NIGHT
+       | AlarmControlPanelEntityFeature.ARM_VACATION
    )
    _element: Area

@@ -204,7 +205,7 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity):
        ArmedStatus.ARMED_STAY_INSTANT: AlarmControlPanelState.ARMED_HOME,
        ArmedStatus.ARMED_TO_NIGHT: AlarmControlPanelState.ARMED_NIGHT,
        ArmedStatus.ARMED_TO_NIGHT_INSTANT: AlarmControlPanelState.ARMED_NIGHT,
-       ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_AWAY,
+       ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_VACATION,
    }

    if self._element.alarm_state is None:

@@ -360,9 +360,9 @@
"acb_battery_state": {
    "name": "Battery state",
    "state": {
-       "discharging": "Discharging",
+       "discharging": "[%key:common::state::discharging%]",
        "idle": "[%key:common::state::idle%]",
-       "charging": "Charging",
+       "charging": "[%key:common::state::charging%]",
        "full": "Full"
    }
},

@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/environment_canada",
  "iot_class": "cloud_polling",
  "loggers": ["env_canada"],
- "requirements": ["env-canada==0.7.2"]
+ "requirements": ["env-canada==0.8.0"]
}

@@ -22,5 +22,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["eq3btsmart"],
- "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.7.1"]
+ "requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.9.0"]
}

@@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType

-from .const import CONF_NOISE_PSK, DATA_FFMPEG_PROXY, DOMAIN
+from .const import CONF_BLUETOOTH_MAC_ADDRESS, CONF_NOISE_PSK, DATA_FFMPEG_PROXY, DOMAIN
from .dashboard import async_setup as async_setup_dashboard
from .domain_data import DomainData

@@ -87,6 +87,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) ->

async def async_remove_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> None:
    """Remove an esphome config entry."""
-   if mac_address := entry.unique_id:
-       async_remove_scanner(hass, mac_address.upper())
+   if bluetooth_mac_address := entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS):
+       async_remove_scanner(hass, bluetooth_mac_address.upper())
    await DomainData.get(hass).get_or_create_store(hass, entry).async_remove()

@@ -284,7 +284,10 @@ class EsphomeAssistSatellite(
        elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END:
            assert event.data is not None
            data_to_send = {
-               "conversation_id": event.data["intent_output"]["conversation_id"] or "",
+               "conversation_id": event.data["intent_output"]["conversation_id"],
+               "continue_conversation": str(
+                   int(event.data["intent_output"]["continue_conversation"])
+               ),
            }
        elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START:
            assert event.data is not None

@@ -3,6 +3,7 @@
from __future__ import annotations

from functools import partial
+from math import isfinite
from typing import Any, cast

from aioesphomeapi import (
@@ -238,9 +239,13 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
    @esphome_state_property
    def current_humidity(self) -> int | None:
        """Return the current humidity."""
-       if not self._static_info.supports_current_humidity:
+       if (
+           not self._static_info.supports_current_humidity
+           or (val := self._state.current_humidity) is None
+           or not isfinite(val)
+       ):
            return None
-       return round(self._state.current_humidity)
+       return round(val)

    @property
    @esphome_float_state_property

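round() raises on NaN, so the rewritten property checks both for a missing value and for a non-finite one before rounding. The same guard in isolation:

from math import isfinite


def safe_round(val: float | None) -> int | None:
    """Return a rounded int, or None for missing or non-finite input."""
    if val is None or not isfinite(val):
        return None
    return round(val)


print(safe_round(47.6))          # 48
print(safe_round(float("nan")))  # None
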
@@ -41,6 +41,7 @@ from .const import (
    CONF_ALLOW_SERVICE_CALLS,
    CONF_DEVICE_NAME,
    CONF_NOISE_PSK,
+   CONF_SUBSCRIBE_LOGS,
    DEFAULT_ALLOW_SERVICE_CALLS,
    DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS,
    DOMAIN,
@@ -508,6 +509,10 @@ class OptionsFlowHandler(OptionsFlow):
                    CONF_ALLOW_SERVICE_CALLS, DEFAULT_ALLOW_SERVICE_CALLS
                ),
            ): bool,
+           vol.Required(
+               CONF_SUBSCRIBE_LOGS,
+               default=self.config_entry.options.get(CONF_SUBSCRIBE_LOGS, False),
+           ): bool,
        }
    )
    return self.async_show_form(step_id="init", data_schema=data_schema)

@@ -5,18 +5,22 @@ from awesomeversion import AwesomeVersion
DOMAIN = "esphome"

CONF_ALLOW_SERVICE_CALLS = "allow_service_calls"
+CONF_SUBSCRIBE_LOGS = "subscribe_logs"
CONF_DEVICE_NAME = "device_name"
CONF_NOISE_PSK = "noise_psk"
+CONF_BLUETOOTH_MAC_ADDRESS = "bluetooth_mac_address"

DEFAULT_ALLOW_SERVICE_CALLS = True
DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False


-STABLE_BLE_VERSION_STR = "2023.8.0"
+STABLE_BLE_VERSION_STR = "2025.2.1"
STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
PROJECT_URLS = {
    "esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",
}
-DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_VERSION_STR}.html"
+# ESPHome always uses .0 for the changelog URL
+STABLE_BLE_URL_VERSION = f"{STABLE_BLE_VERSION.major}.{STABLE_BLE_VERSION.minor}.0"
+DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_URL_VERSION}.html"

DATA_FFMPEG_PROXY = f"{DOMAIN}.ffmpeg_proxy"

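The changelog URL is now derived from the stable version's major and minor components with a forced .0 patch, matching how ESPHome names its changelog pages. The same derivation in isolation, assuming the awesomeversion package is installed:

from awesomeversion import AwesomeVersion

version = AwesomeVersion("2025.2.1")
# ESPHome changelog pages always use a .0 patch version.
url_version = f"{version.major}.{version.minor}.0"
print(f"https://esphome.io/changelog/{url_version}.html")
# https://esphome.io/changelog/2025.2.0.html
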
@@ -13,9 +13,7 @@ from . import CONF_NOISE_PSK
from .dashboard import async_get_dashboard
from .entry_data import ESPHomeConfigEntry

-CONF_MAC_ADDRESS = "mac_address"
-
-REDACT_KEYS = {CONF_NOISE_PSK, CONF_PASSWORD, CONF_MAC_ADDRESS}
+REDACT_KEYS = {CONF_NOISE_PSK, CONF_PASSWORD, "mac_address", "bluetooth_mac_address"}


async def async_get_config_entry_diagnostics(
@@ -27,13 +25,17 @@ async def async_get_config_entry_diagnostics(
    diag["config"] = config_entry.as_dict()

    entry_data = config_entry.runtime_data
+   device_info = entry_data.device_info

    if (storage_data := await entry_data.store.async_load()) is not None:
        diag["storage_data"] = storage_data

    if (
-       config_entry.unique_id
-       and (scanner := async_scanner_by_source(hass, config_entry.unique_id.upper()))
+       device_info
+       and (
+           scanner_mac := device_info.bluetooth_mac_address or device_info.mac_address
+       )
+       and (scanner := async_scanner_by_source(hass, scanner_mac.upper()))
        and (bluetooth_device := entry_data.bluetooth_device)
    ):
        diag["bluetooth"] = {

@@ -5,6 +5,7 @@ from __future__ import annotations
import asyncio
from functools import partial
import logging
import re
from typing import TYPE_CHECKING, Any, NamedTuple

from aioesphomeapi import (
@@ -16,6 +17,7 @@ from aioesphomeapi import (
    HomeassistantServiceCall,
    InvalidAuthAPIError,
    InvalidEncryptionKeyAPIError,
+   LogLevel,
    ReconnectLogic,
    RequiresEncryptionAPIError,
    UserService,
@@ -33,6 +35,7 @@ from homeassistant.const import (
    Platform,
)
from homeassistant.core import (
+   CALLBACK_TYPE,
    Event,
    EventStateChangedData,
    HomeAssistant,
@@ -60,7 +63,9 @@ from homeassistant.util.async_ import create_eager_task
from .bluetooth import async_connect_scanner
from .const import (
    CONF_ALLOW_SERVICE_CALLS,
+   CONF_BLUETOOTH_MAC_ADDRESS,
    CONF_DEVICE_NAME,
+   CONF_SUBSCRIBE_LOGS,
    DEFAULT_ALLOW_SERVICE_CALLS,
    DEFAULT_URL,
    DOMAIN,
@@ -74,8 +79,38 @@ from .domain_data import DomainData
# Import config flow so that it's added to the registry
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData

+if TYPE_CHECKING:
+   from aioesphomeapi.api_pb2 import (  # type: ignore[attr-defined]
+       SubscribeLogsResponse,
+   )


_LOGGER = logging.getLogger(__name__)

+LOG_LEVEL_TO_LOGGER = {
+   LogLevel.LOG_LEVEL_NONE: logging.DEBUG,
+   LogLevel.LOG_LEVEL_ERROR: logging.ERROR,
+   LogLevel.LOG_LEVEL_WARN: logging.WARNING,
+   LogLevel.LOG_LEVEL_INFO: logging.INFO,
+   LogLevel.LOG_LEVEL_CONFIG: logging.INFO,
+   LogLevel.LOG_LEVEL_DEBUG: logging.DEBUG,
+   LogLevel.LOG_LEVEL_VERBOSE: logging.DEBUG,
+   LogLevel.LOG_LEVEL_VERY_VERBOSE: logging.DEBUG,
+}
+LOGGER_TO_LOG_LEVEL = {
+   logging.NOTSET: LogLevel.LOG_LEVEL_VERY_VERBOSE,
+   logging.DEBUG: LogLevel.LOG_LEVEL_VERY_VERBOSE,
+   logging.INFO: LogLevel.LOG_LEVEL_CONFIG,
+   logging.WARNING: LogLevel.LOG_LEVEL_WARN,
+   logging.ERROR: LogLevel.LOG_LEVEL_ERROR,
+   logging.CRITICAL: LogLevel.LOG_LEVEL_ERROR,
+}
+# 7-bit and 8-bit C1 ANSI sequences
+# https://stackoverflow.com/questions/14693701/how-can-i-remove-the-ansi-escape-sequences-from-a-string-in-python
+ANSI_ESCAPE_78BIT = re.compile(
+   rb"(?:\x1B[@-Z\\-_]|[\x80-\x9A\x9C-\x9F]|(?:\x1B\[|\x9B)[0-?]*[ -/]*[@-~])"
+)

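ANSI_ESCAPE_78BIT strips both 7-bit and 8-bit C1 escape sequences from the device's raw log bytes before they are re-logged, and LOG_LEVEL_TO_LOGGER maps the ESPHome level onto the stdlib one. A runnable illustration of the stripping, reusing the same pattern:

import re

ANSI_ESCAPE_78BIT = re.compile(
    rb"(?:\x1B[@-Z\\-_]|[\x80-\x9A\x9C-\x9F]|(?:\x1B\[|\x9B)[0-?]*[ -/]*[@-~])"
)

raw = b"\x1b[0;32m[I][app:102]\x1b[0m: Boot complete"
clean = ANSI_ESCAPE_78BIT.sub(b"", raw).decode("utf-8", "backslashreplace")
print(clean)  # [I][app:102]: Boot complete
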
@callback
def _async_check_firmware_version(
@@ -136,6 +171,8 @@ class ESPHomeManager:
    """Class to manage an ESPHome connection."""

    __slots__ = (
+       "_cancel_subscribe_logs",
+       "_log_level",
        "cli",
        "device_id",
        "domain_data",
@@ -169,6 +206,8 @@ class ESPHomeManager:
        self.reconnect_logic: ReconnectLogic | None = None
        self.zeroconf_instance = zeroconf_instance
        self.entry_data = entry.runtime_data
+       self._cancel_subscribe_logs: CALLBACK_TYPE | None = None
+       self._log_level = LogLevel.LOG_LEVEL_NONE

    async def on_stop(self, event: Event) -> None:
        """Cleanup the socket client on HA close."""
@@ -341,6 +380,34 @@ class ESPHomeManager:
        # Re-connection logic will trigger after this
        await self.cli.disconnect()

+   def _async_on_log(self, msg: SubscribeLogsResponse) -> None:
+       """Handle a log message from the API."""
+       log: bytes = msg.message
+       _LOGGER.log(
+           LOG_LEVEL_TO_LOGGER.get(msg.level, logging.DEBUG),
+           "%s: %s",
+           self.entry.title,
+           ANSI_ESCAPE_78BIT.sub(b"", log).decode("utf-8", "backslashreplace"),
+       )
+
+   @callback
+   def _async_get_equivalent_log_level(self) -> LogLevel:
+       """Get the equivalent ESPHome log level for the current logger."""
+       return LOGGER_TO_LOG_LEVEL.get(
+           _LOGGER.getEffectiveLevel(), LogLevel.LOG_LEVEL_VERY_VERBOSE
+       )
+
+   @callback
+   def _async_subscribe_logs(self, log_level: LogLevel) -> None:
+       """Subscribe to logs."""
+       if self._cancel_subscribe_logs is not None:
+           self._cancel_subscribe_logs()
+           self._cancel_subscribe_logs = None
+       self._log_level = log_level
+       self._cancel_subscribe_logs = self.cli.subscribe_logs(
+           self._async_on_log, self._log_level
+       )

    async def _on_connnect(self) -> None:
        """Subscribe to states and list entities on successful API login."""
        entry = self.entry
@@ -352,6 +419,8 @@ class ESPHomeManager:
        cli = self.cli
        stored_device_name = entry.data.get(CONF_DEVICE_NAME)
        unique_id_is_mac_address = unique_id and ":" in unique_id
+       if entry.options.get(CONF_SUBSCRIBE_LOGS):
+           self._async_subscribe_logs(self._async_get_equivalent_log_level())
        results = await asyncio.gather(
            create_eager_task(cli.device_info()),
            create_eager_task(cli.list_entities_services()),
@@ -363,6 +432,13 @@ class ESPHomeManager:

        device_mac = format_mac(device_info.mac_address)
        mac_address_matches = unique_id == device_mac
+       if (
+           bluetooth_mac_address := device_info.bluetooth_mac_address
+       ) and entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS) != bluetooth_mac_address:
+           hass.config_entries.async_update_entry(
+               entry,
+               data={**entry.data, CONF_BLUETOOTH_MAC_ADDRESS: bluetooth_mac_address},
+           )
        #
        # Migrate config entry to new unique ID if the current
        # unique id is not a mac address.
@@ -430,7 +506,9 @@ class ESPHomeManager:
            )
        )
        else:
-           bluetooth.async_remove_scanner(hass, device_info.mac_address)
+           bluetooth.async_remove_scanner(
+               hass, device_info.bluetooth_mac_address or device_info.mac_address
+           )

        if device_info.voice_assistant_feature_flags_compat(api_version) and (
            Platform.ASSIST_SATELLITE not in entry_data.loaded_platforms
@@ -503,6 +581,10 @@ class ESPHomeManager:
    def _async_handle_logging_changed(self, _event: Event) -> None:
        """Handle when the logging level changes."""
        self.cli.set_debug(_LOGGER.isEnabledFor(logging.DEBUG))
+       if self.entry.options.get(CONF_SUBSCRIBE_LOGS) and self._log_level != (
+           new_log_level := self._async_get_equivalent_log_level()
+       ):
+           self._async_subscribe_logs(new_log_level)

    async def async_start(self) -> None:
        """Start the esphome connection manager."""
@@ -545,11 +627,22 @@ class ESPHomeManager:
        )
        _setup_services(hass, entry_data, services)

-       if entry_data.device_info is not None and entry_data.device_info.name:
-           reconnect_logic.name = entry_data.device_info.name
+       if (device_info := entry_data.device_info) is not None:
+           if device_info.name:
+               reconnect_logic.name = device_info.name
+           if (
+               bluetooth_mac_address := device_info.bluetooth_mac_address
+           ) and entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS) != bluetooth_mac_address:
+               hass.config_entries.async_update_entry(
+                   entry,
+                   data={
+                       **entry.data,
+                       CONF_BLUETOOTH_MAC_ADDRESS: bluetooth_mac_address,
+                   },
+               )
        if entry.unique_id is None:
            hass.config_entries.async_update_entry(
-               entry, unique_id=format_mac(entry_data.device_info.mac_address)
+               entry, unique_id=format_mac(device_info.mac_address)
            )

        await reconnect_logic.start()

@@ -16,9 +16,9 @@
  "loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
  "mqtt": ["esphome/discover/#"],
  "requirements": [
-   "aioesphomeapi==29.1.1",
+   "aioesphomeapi==29.3.2",
    "esphome-dashboard-api==1.2.3",
-   "bleak-esphome==2.7.1"
+   "bleak-esphome==2.9.0"
  ],
  "zeroconf": ["_esphomelib._tcp.local."]
}

@@ -54,7 +54,8 @@
"step": {
  "init": {
    "data": {
-     "allow_service_calls": "Allow the device to perform Home Assistant actions."
+     "allow_service_calls": "Allow the device to perform Home Assistant actions.",
+     "subscribe_logs": "Subscribe to logs from the device. When enabled, the device will send logs to Home Assistant and you can view them in the logs panel."
    }
  }
}

@@ -141,11 +141,6 @@ class EzvizCamera(EzvizEntity, Camera):
        if camera_password:
            self._attr_supported_features = CameraEntityFeature.STREAM

-   @property
-   def available(self) -> bool:
-       """Return True if entity is available."""
-       return self.data["status"] != 2

    @property
    def is_on(self) -> bool:
        """Return true if on."""

@@ -42,6 +42,11 @@ class EzvizEntity(CoordinatorEntity[EzvizDataUpdateCoordinator], Entity):
        """Return coordinator data for this entity."""
        return self.coordinator.data[self._serial]

+   @property
+   def available(self) -> bool:
+       """Return True if entity is available."""
+       return self.data["status"] != 2


class EzvizBaseEntity(Entity):
    """Generic entity for EZVIZ individual poll entities."""
@@ -72,3 +77,8 @@ class EzvizBaseEntity(Entity):
    def data(self) -> dict[str, Any]:
        """Return coordinator data for this entity."""
        return self.coordinator.data[self._serial]

+   @property
+   def available(self) -> bool:
+       """Return True if entity is available."""
+       return self.data["status"] != 2

@@ -4,6 +4,7 @@ from __future__ import annotations

import logging

+from propcache.api import cached_property
from pyezviz.exceptions import PyEzvizError
from pyezviz.utils import decrypt_image

@@ -62,6 +63,11 @@ class EzvizLastMotion(EzvizEntity, ImageEntity):
            else None
        )

+   @cached_property
+   def available(self) -> bool:
+       """Entity gets data from ezviz API so always available."""
+       return True

    async def _async_load_image_from_url(self, url: str) -> Image | None:
        """Load an image by url."""
        if response := await self._fetch_url(url):

@@ -7,21 +7,21 @@ from collections.abc import Callable, Mapping
import logging
from typing import Any

-from pyfibaro.fibaro_client import FibaroClient
+from pyfibaro.fibaro_client import (
+   FibaroAuthenticationFailed,
+   FibaroClient,
+   FibaroConnectFailed,
+)
+from pyfibaro.fibaro_data_helper import read_rooms
from pyfibaro.fibaro_device import DeviceModel
-from pyfibaro.fibaro_room import RoomModel
+from pyfibaro.fibaro_info import InfoModel
from pyfibaro.fibaro_scene import SceneModel
from pyfibaro.fibaro_state_resolver import FibaroEvent, FibaroStateResolver
-from requests.exceptions import HTTPError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import (
-   ConfigEntryAuthFailed,
-   ConfigEntryNotReady,
-   HomeAssistantError,
-)
+from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceEntry, DeviceInfo
from homeassistant.util import slugify
@@ -74,63 +74,31 @@ FIBARO_TYPEMAP = {
class FibaroController:
    """Initiate Fibaro Controller Class."""

-   def __init__(self, config: Mapping[str, Any]) -> None:
+   def __init__(
+       self, fibaro_client: FibaroClient, info: InfoModel, import_plugins: bool
+   ) -> None:
        """Initialize the Fibaro controller."""

-       # The FibaroClient uses the correct API version automatically
-       self._client = FibaroClient(config[CONF_URL])
-       self._client.set_authentication(config[CONF_USERNAME], config[CONF_PASSWORD])
+       self._client = fibaro_client
+       self._fibaro_info = info

        # Whether to import devices from plugins
-       self._import_plugins = config[CONF_IMPORT_PLUGINS]
-       self._room_map: dict[int, RoomModel]  # Mapping roomId to room object
+       self._import_plugins = import_plugins
+       # Mapping roomId to room object
+       self._room_map = read_rooms(fibaro_client)
        self._device_map: dict[int, DeviceModel]  # Mapping deviceId to device object
        self.fibaro_devices: dict[Platform, list[DeviceModel]] = defaultdict(
            list
        )  # List of devices by entity platform
        # All scenes
-       self._scenes: list[SceneModel] = []
+       self._scenes = self._client.read_scenes()
        self._callbacks: dict[int, list[Any]] = {}  # Update value callbacks by deviceId
        # Event callbacks by device id
        self._event_callbacks: dict[int, list[Callable[[FibaroEvent], None]]] = {}
-       self.hub_serial: str  # Unique serial number of the hub
-       self.hub_name: str  # The friendly name of the hub
-       self.hub_model: str
-       self.hub_software_version: str
-       self.hub_api_url: str = config[CONF_URL]
+       # Unique serial number of the hub
+       self.hub_serial = info.serial_number
        # Device infos by fibaro device id
        self._device_infos: dict[int, DeviceInfo] = {}

-   def connect(self) -> None:
-       """Start the communication with the Fibaro controller."""
-
-       # Return value doesn't need to be checked,
-       # it is only relevant when connecting without credentials
-       self._client.connect()
-       info = self._client.read_info()
-       self.hub_serial = info.serial_number
-       self.hub_name = info.hc_name
-       self.hub_model = info.platform
-       self.hub_software_version = info.current_version
-
-       self._room_map = {room.fibaro_id: room for room in self._client.read_rooms()}
        self._read_devices()
-       self._scenes = self._client.read_scenes()
-
-   def connect_with_error_handling(self) -> None:
-       """Translate connect errors to easily differentiate auth and connect failures.
-
-       When there is a better error handling in the used library this can be improved.
-       """
-       try:
-           self.connect()
-       except HTTPError as http_ex:
-           if http_ex.response.status_code == 403:
-               raise FibaroAuthFailed from http_ex
-
-           raise FibaroConnectFailed from http_ex
-       except Exception as ex:
-           raise FibaroConnectFailed from ex

    def enable_state_handler(self) -> None:
        """Start StateHandler thread for monitoring updates."""
@@ -302,14 +270,20 @@

    def get_room_name(self, room_id: int) -> str | None:
        """Get the room name by room id."""
-       assert self._room_map
-       room = self._room_map.get(room_id)
-       return room.name if room else None
+       return self._room_map.get(room_id)

    def read_scenes(self) -> list[SceneModel]:
        """Return list of scenes."""
        return self._scenes

+   def read_fibaro_info(self) -> InfoModel:
+       """Return the general info about the hub."""
+       return self._fibaro_info
+
+   def get_frontend_url(self) -> str:
+       """Return the url to the Fibaro hub web UI."""
+       return self._client.frontend_url()

    def _read_devices(self) -> None:
        """Read and process the device list."""
        devices = self._client.read_devices()
@@ -319,20 +293,17 @@
        for device in devices:
            try:
                device.fibaro_controller = self
-               if device.room_id == 0:
+               room_name = self.get_room_name(device.room_id)
+               if not room_name:
                    room_name = "Unknown"
-               else:
-                   room_name = self._room_map[device.room_id].name
                device.room_name = room_name
                device.friendly_name = f"{room_name} {device.name}"
                device.ha_id = (
                    f"{slugify(room_name)}_{slugify(device.name)}_{device.fibaro_id}"
                )
                if device.enabled and (not device.is_plugin or self._import_plugins):
-                   device.mapped_platform = self._map_device_to_platform(device)
-               else:
-                   device.mapped_platform = None
-               if (platform := device.mapped_platform) is None:
+                   platform = self._map_device_to_platform(device)
+               if platform is None:
                    continue
                device.unique_id_str = f"{slugify(self.hub_serial)}.{device.fibaro_id}"
                self._create_device_info(device, devices)
@@ -375,11 +346,17 @@
            pass


+def connect_fibaro_client(data: Mapping[str, Any]) -> tuple[InfoModel, FibaroClient]:
+   """Connect to the fibaro hub and read some basic data."""
+   client = FibaroClient(data[CONF_URL])
+   info = client.connect_with_credentials(data[CONF_USERNAME], data[CONF_PASSWORD])
+   return (info, client)
+
+
def init_controller(data: Mapping[str, Any]) -> FibaroController:
-   """Validate the user input allows us to connect to fibaro."""
-   controller = FibaroController(data)
-   controller.connect_with_error_handling()
-   return controller
+   """Connect to the fibaro hub and init the controller."""
+   info, client = connect_fibaro_client(data)
+   return FibaroController(client, info, data[CONF_IMPORT_PLUGINS])


async def async_setup_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bool:
@@ -393,22 +370,24 @@ async def async_setup_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bo
        raise ConfigEntryNotReady(
            f"Could not connect to controller at {entry.data[CONF_URL]}"
        ) from connect_ex
-   except FibaroAuthFailed as auth_ex:
+   except FibaroAuthenticationFailed as auth_ex:
        raise ConfigEntryAuthFailed from auth_ex

    entry.runtime_data = controller

    # register the hub device info separately as the hub has sometimes no entities
+   fibaro_info = controller.read_fibaro_info()
    device_registry = dr.async_get(hass)
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(DOMAIN, controller.hub_serial)},
        serial_number=controller.hub_serial,
-       manufacturer="Fibaro",
-       name=controller.hub_name,
-       model=controller.hub_model,
-       sw_version=controller.hub_software_version,
-       configuration_url=controller.hub_api_url.removesuffix("/api/"),
+       manufacturer=fibaro_info.manufacturer_name,
+       name=fibaro_info.hc_name,
+       model=fibaro_info.model_name,
+       sw_version=fibaro_info.current_version,
+       configuration_url=controller.get_frontend_url(),
+       connections={(dr.CONNECTION_NETWORK_MAC, fibaro_info.mac_address)},
    )

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -443,11 +422,3 @@ async def async_remove_config_entry_device(
        return False

    return True

-
-class FibaroConnectFailed(HomeAssistantError):
-   """Error to indicate we cannot connect to fibaro home center."""
-
-
-class FibaroAuthFailed(HomeAssistantError):
-   """Error to indicate that authentication failed on fibaro home center."""

@@ -129,13 +129,13 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
    def __init__(self, fibaro_device: DeviceModel) -> None:
        """Initialize the Fibaro device."""
        super().__init__(fibaro_device)
-       self._temp_sensor_device: FibaroEntity | None = None
-       self._target_temp_device: FibaroEntity | None = None
-       self._op_mode_device: FibaroEntity | None = None
-       self._fan_mode_device: FibaroEntity | None = None
+       self._temp_sensor_device: DeviceModel | None = None
+       self._target_temp_device: DeviceModel | None = None
+       self._op_mode_device: DeviceModel | None = None
+       self._fan_mode_device: DeviceModel | None = None
        self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id)

-       siblings = fibaro_device.fibaro_controller.get_siblings(fibaro_device)
+       siblings = self.controller.get_siblings(fibaro_device)
        _LOGGER.debug("%s siblings: %s", fibaro_device.ha_id, siblings)
        tempunit = "C"
        for device in siblings:
@@ -147,23 +147,23 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
                and (device.value.has_value or device.has_heating_thermostat_setpoint)
                and device.unit in ("C", "F")
            ):
-               self._temp_sensor_device = FibaroEntity(device)
+               self._temp_sensor_device = device
                tempunit = device.unit

            if any(
                action for action in TARGET_TEMP_ACTIONS if action in device.actions
            ):
-               self._target_temp_device = FibaroEntity(device)
+               self._target_temp_device = device
                self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE
                if device.has_unit:
                    tempunit = device.unit

            if any(action for action in OP_MODE_ACTIONS if action in device.actions):
-               self._op_mode_device = FibaroEntity(device)
+               self._op_mode_device = device
                self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE

            if "setFanMode" in device.actions:
-               self._fan_mode_device = FibaroEntity(device)
+               self._fan_mode_device = device
                self._attr_supported_features |= ClimateEntityFeature.FAN_MODE

        if tempunit == "F":
@@ -172,7 +172,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
            self._attr_temperature_unit = UnitOfTemperature.CELSIUS

        if self._fan_mode_device:
-           fan_modes = self._fan_mode_device.fibaro_device.supported_modes
+           fan_modes = self._fan_mode_device.supported_modes
            self._attr_fan_modes = []
            for mode in fan_modes:
                if mode not in FANMODES:
@@ -184,7 +184,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
        if self._op_mode_device:
            self._attr_preset_modes = []
            self._attr_hvac_modes: list[HVACMode] = []
-           device = self._op_mode_device.fibaro_device
+           device = self._op_mode_device
            if device.has_supported_thermostat_modes:
                for mode in device.supported_thermostat_modes:
                    try:
@@ -222,15 +222,15 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
                "- _fan_mode_device %s"
            ),
            self.ha_id,
-           self._temp_sensor_device.ha_id if self._temp_sensor_device else "None",
-           self._target_temp_device.ha_id if self._target_temp_device else "None",
-           self._op_mode_device.ha_id if self._op_mode_device else "None",
-           self._fan_mode_device.ha_id if self._fan_mode_device else "None",
+           self._temp_sensor_device.fibaro_id if self._temp_sensor_device else "None",
+           self._target_temp_device.fibaro_id if self._target_temp_device else "None",
+           self._op_mode_device.fibaro_id if self._op_mode_device else "None",
+           self._fan_mode_device.fibaro_id if self._fan_mode_device else "None",
        )
        await super().async_added_to_hass()

        # Register update callback for child devices
-       siblings = self.fibaro_device.fibaro_controller.get_siblings(self.fibaro_device)
+       siblings = self.controller.get_siblings(self.fibaro_device)
        for device in siblings:
            if device != self.fibaro_device:
                self.controller.register(device.fibaro_id, self._update_callback)
@@ -240,14 +240,14 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
        """Return the fan setting."""
        if not self._fan_mode_device:
            return None
-       mode = self._fan_mode_device.fibaro_device.mode
+       mode = self._fan_mode_device.mode
        return FANMODES[mode]

    def set_fan_mode(self, fan_mode: str) -> None:
        """Set new target fan mode."""
        if not self._fan_mode_device:
            return
-       self._fan_mode_device.action("setFanMode", HA_FANMODES[fan_mode])
+       self._fan_mode_device.execute_action("setFanMode", [HA_FANMODES[fan_mode]])

    @property
    def fibaro_op_mode(self) -> str | int:
@@ -255,7 +255,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
        if not self._op_mode_device:
            return HA_OPMODES_HVAC[HVACMode.AUTO]

-       device = self._op_mode_device.fibaro_device
+       device = self._op_mode_device

        if device.has_operating_mode:
            return device.operating_mode
@@ -281,17 +281,17 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
        if not self._op_mode_device:
            return

-       if "setOperatingMode" in self._op_mode_device.fibaro_device.actions:
-           self._op_mode_device.action("setOperatingMode", HA_OPMODES_HVAC[hvac_mode])
-       elif "setThermostatMode" in self._op_mode_device.fibaro_device.actions:
-           device = self._op_mode_device.fibaro_device
+       device = self._op_mode_device
+       if "setOperatingMode" in device.actions:
+           device.execute_action("setOperatingMode", [HA_OPMODES_HVAC[hvac_mode]])
+       elif "setThermostatMode" in device.actions:
            if device.has_supported_thermostat_modes:
                for mode in device.supported_thermostat_modes:
                    if mode.lower() == hvac_mode:
-                       self._op_mode_device.action("setThermostatMode", mode)
+                       device.execute_action("setThermostatMode", [mode])
                        break
-       elif "setMode" in self._op_mode_device.fibaro_device.actions:
-           self._op_mode_device.action("setMode", HA_OPMODES_HVAC[hvac_mode])
+       elif "setMode" in device.actions:
+           device.execute_action("setMode", [HA_OPMODES_HVAC[hvac_mode]])

    @property
    def hvac_action(self) -> HVACAction | None:
@@ -299,7 +299,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
        if not self._op_mode_device:
            return None

-       device = self._op_mode_device.fibaro_device
+       device = self._op_mode_device
        if device.has_thermostat_operating_state:
            with suppress(ValueError):
                return HVACAction(device.thermostat_operating_state.lower())
@@ -315,15 +315,15 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
        if not self._op_mode_device:
            return None

-       if self._op_mode_device.fibaro_device.has_thermostat_mode:
-           mode = self._op_mode_device.fibaro_device.thermostat_mode
+       if self._op_mode_device.has_thermostat_mode:
+           mode = self._op_mode_device.thermostat_mode
            if self.preset_modes is not None and mode in self.preset_modes:
                return mode
            return None
-       if self._op_mode_device.fibaro_device.has_operating_mode:
-           mode = self._op_mode_device.fibaro_device.operating_mode
+       if self._op_mode_device.has_operating_mode:
+           mode = self._op_mode_device.operating_mode
        else:
-           mode = self._op_mode_device.fibaro_device.mode
+           mode = self._op_mode_device.mode

        if mode not in OPMODES_PRESET:
            return None
@@ -334,20 +334,22 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
        if self._op_mode_device is None:
            return

-       if "setThermostatMode" in self._op_mode_device.fibaro_device.actions:
-           self._op_mode_device.action("setThermostatMode", preset_mode)
-       elif "setOperatingMode" in self._op_mode_device.fibaro_device.actions:
-           self._op_mode_device.action(
-               "setOperatingMode", HA_OPMODES_PRESET[preset_mode]
+       if "setThermostatMode" in self._op_mode_device.actions:
+           self._op_mode_device.execute_action("setThermostatMode", [preset_mode])
+       elif "setOperatingMode" in self._op_mode_device.actions:
+           self._op_mode_device.execute_action(
+               "setOperatingMode", [HA_OPMODES_PRESET[preset_mode]]
            )
+       elif "setMode" in self._op_mode_device.actions:
+           self._op_mode_device.execute_action(
+               "setMode", [HA_OPMODES_PRESET[preset_mode]]
+           )
-       elif "setMode" in self._op_mode_device.fibaro_device.actions:
-           self._op_mode_device.action("setMode", HA_OPMODES_PRESET[preset_mode])

    @property
    def current_temperature(self) -> float | None:
        """Return the current temperature."""
        if self._temp_sensor_device:
-           device = self._temp_sensor_device.fibaro_device
+           device = self._temp_sensor_device
            if device.has_heating_thermostat_setpoint:
                return device.heating_thermostat_setpoint
            return device.value.float_value()
@@ -357,7 +359,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
    def target_temperature(self) -> float | None:
        """Return the temperature we try to reach."""
        if self._target_temp_device:
-           device = self._target_temp_device.fibaro_device
+           device = self._target_temp_device
            if device.has_heating_thermostat_setpoint_future:
                return device.heating_thermostat_setpoint_future
            return device.target_level
@@ -368,9 +370,11 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
        temperature = kwargs.get(ATTR_TEMPERATURE)
        target = self._target_temp_device
        if target is not None and temperature is not None:
-           if "setThermostatSetpoint" in target.fibaro_device.actions:
-               target.action("setThermostatSetpoint", self.fibaro_op_mode, temperature)
-           elif "setHeatingThermostatSetpoint" in target.fibaro_device.actions:
-               target.action("setHeatingThermostatSetpoint", temperature)
+           if "setThermostatSetpoint" in target.actions:
+               target.execute_action(
+                   "setThermostatSetpoint", [self.fibaro_op_mode, temperature]
+               )
+           elif "setHeatingThermostatSetpoint" in target.actions:
+               target.execute_action("setHeatingThermostatSetpoint", [temperature])
            else:
-               target.action("setTargetLevel", temperature)
+               target.execute_action("setTargetLevel", [temperature])

@@ -6,6 +6,7 @@ from collections.abc import Mapping
import logging
from typing import Any

+from pyfibaro.fibaro_client import FibaroAuthenticationFailed, FibaroConnectFailed
from slugify import slugify
import voluptuous as vol

@@ -13,7 +14,7 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME
from homeassistant.core import HomeAssistant

-from . import FibaroAuthFailed, FibaroConnectFailed, init_controller
+from . import connect_fibaro_client
from .const import CONF_IMPORT_PLUGINS, DOMAIN

_LOGGER = logging.getLogger(__name__)
@@ -33,16 +34,16 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str

    Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
    """
-   controller = await hass.async_add_executor_job(init_controller, data)
+   info, _ = await hass.async_add_executor_job(connect_fibaro_client, data)

    _LOGGER.debug(
        "Successfully connected to fibaro home center %s with name %s",
-       controller.hub_serial,
-       controller.hub_name,
+       info.serial_number,
+       info.hc_name,
    )
    return {
-       "serial_number": slugify(controller.hub_serial),
-       "name": controller.hub_name,
+       "serial_number": slugify(info.serial_number),
+       "name": info.hc_name,
    }


@@ -75,7 +76,7 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN):
            info = await _validate_input(self.hass, user_input)
        except FibaroConnectFailed:
            errors["base"] = "cannot_connect"
-       except FibaroAuthFailed:
+       except FibaroAuthenticationFailed:
            errors["base"] = "invalid_auth"
        else:
            await self.async_set_unique_id(info["serial_number"])
@@ -106,7 +107,7 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN):
            await _validate_input(self.hass, new_data)
        except FibaroConnectFailed:
            errors["base"] = "cannot_connect"
-       except FibaroAuthFailed:
+       except FibaroAuthenticationFailed:
            errors["base"] = "invalid_auth"
        else:
            return self.async_update_reload_and_abort(

@@ -11,6 +11,8 @@ from pyfibaro.fibaro_device import DeviceModel
from homeassistant.const import ATTR_ARMED, ATTR_BATTERY_LEVEL
from homeassistant.helpers.entity import Entity

+from . import FibaroController

_LOGGER = logging.getLogger(__name__)


@@ -22,7 +24,7 @@ class FibaroEntity(Entity):
    def __init__(self, fibaro_device: DeviceModel) -> None:
        """Initialize the device."""
        self.fibaro_device = fibaro_device
-       self.controller = fibaro_device.fibaro_controller
+       self.controller: FibaroController = fibaro_device.fibaro_controller
        self.ha_id = fibaro_device.ha_id
        self._attr_name = fibaro_device.friendly_name
        self._attr_unique_id = fibaro_device.unique_id_str
@@ -54,15 +56,6 @@ class FibaroEntity(Entity):
            return self.fibaro_device.value_2.int_value()
        return None

-   def dont_know_message(self, cmd: str) -> None:
-       """Make a warning in case we don't know how to perform an action."""
-       _LOGGER.warning(
-           "Not sure how to %s: %s (available actions: %s)",
-           cmd,
-           str(self.ha_id),
-           str(self.fibaro_device.actions),
-       )

    def set_level(self, level: int) -> None:
        """Set the level of Fibaro device."""
        self.action("setValue", level)
@@ -97,11 +90,7 @@ class FibaroEntity(Entity):

    def action(self, cmd: str, *args: Any) -> None:
        """Perform an action on the Fibaro HC."""
-       if cmd in self.fibaro_device.actions:
-           self.fibaro_device.execute_action(cmd, args)
-           _LOGGER.debug("-> %s.%s%s called", str(self.ha_id), str(cmd), str(args))
-       else:
-           self.dont_know_message(cmd)
+       self.fibaro_device.execute_action(cmd, args)

    @property
    def current_binary_state(self) -> bool:

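With dont_know_message gone, action() above now forwards unconditionally to execute_action, and the variadic call sites keep working because the *args tuple is passed straight through. A thin stand-alone sketch of that wrapper, using a hypothetical device stub rather than the pyfibaro API:

from typing import Any


class DeviceStub:
    """Hypothetical stand-in for a pyfibaro device."""

    def execute_action(self, cmd: str, args: Any) -> None:
        print(f"{cmd}({list(args)})")


class EntityShim:
    def __init__(self, device: DeviceStub) -> None:
        self.fibaro_device = device

    def action(self, cmd: str, *args: Any) -> None:
        # Variadic wrapper: pack the positional args for the library call.
        self.fibaro_device.execute_action(cmd, args)


EntityShim(DeviceStub()).action("setTargetLevel", 42)  # setTargetLevel([42])
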
@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "local_push",
  "loggers": ["pyfibaro"],
- "requirements": ["pyfibaro==0.8.0"]
+ "requirements": ["pyfibaro==0.8.2"]
}

homeassistant/components/frankever/__init__.py (new file, 1 line)
@@ -0,0 +1 @@
+"""FrankEver virtual integration."""

homeassistant/components/frankever/manifest.json (new file, 6 lines)
@@ -0,0 +1,6 @@
+{
+  "domain": "frankever",
+  "name": "FrankEver",
+  "integration_type": "virtual",
+  "supported_by": "shelly"
+}
@@ -1,7 +1,6 @@
 {
   "domain": "frontend",
   "name": "Home Assistant Frontend",
-  "after_dependencies": ["backup"],
   "codeowners": ["@home-assistant/frontend"],
   "dependencies": [
     "api",

@@ -21,5 +20,5 @@
   "documentation": "https://www.home-assistant.io/integrations/frontend",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["home-assistant-frontend==20250221.0"]
+  "requirements": ["home-assistant-frontend==20250228.0"]
 }
@@ -45,7 +45,7 @@
       },
       "mode": {
         "name": "[%key:common::config_flow::data::mode%]",
-        "description": "One of: off, timer or footprint."
+        "description": "The zone's operating mode."
       }
     }
   },
@@ -7,7 +7,7 @@ from collections.abc import Callable
 from google_drive_api.exceptions import GoogleDriveApiError

 from homeassistant.config_entries import ConfigEntry
-from homeassistant.core import HomeAssistant, callback
+from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import instance_id
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

@@ -49,7 +49,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: GoogleDriveConfigEntry)
     except GoogleDriveApiError as err:
         raise ConfigEntryNotReady from err

-    _async_notify_backup_listeners_soon(hass)
+    def async_notify_backup_listeners() -> None:
+        for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
+            listener()
+
+    entry.async_on_unload(entry.async_on_state_change(async_notify_backup_listeners))

     return True

@@ -58,15 +62,4 @@ async def async_unload_entry(
     hass: HomeAssistant, entry: GoogleDriveConfigEntry
 ) -> bool:
     """Unload a config entry."""
-    _async_notify_backup_listeners_soon(hass)
     return True
-
-
-def _async_notify_backup_listeners(hass: HomeAssistant) -> None:
-    for listener in hass.data.get(DATA_BACKUP_AGENT_LISTENERS, []):
-        listener()
-
-
-@callback
-def _async_notify_backup_listeners_soon(hass: HomeAssistant) -> None:
-    hass.loop.call_soon(_async_notify_backup_listeners, hass)
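The refactor above drops the module-level call_soon helpers and instead registers a local callback through entry.async_on_state_change, so backup listeners are notified on config-entry state transitions and the subscription is removed again by entry.async_on_unload. A hedged sketch of the underlying listener registry; the key name mirrors DATA_BACKUP_AGENT_LISTENERS, but the shape is illustrative:

    # Minimal sketch of a hass.data-style listener registry; not the real
    # Home Assistant structures, just the pattern the hunk above notifies.
    from collections.abc import Callable

    listeners: dict[str, list[Callable[[], None]]] = {}


    def register_listener(key: str, listener: Callable[[], None]) -> Callable[[], None]:
        """Store a zero-arg callback; return a function that removes it again."""
        listeners.setdefault(key, []).append(listener)

        def remove() -> None:
            listeners[key].remove(listener)

        return remove


    def notify(key: str) -> None:
        """Invoke every registered callback, as async_notify_backup_listeners does."""
        for listener in listeners.get(key, []):
            listener()


    unsub = register_listener("backup_agent_listeners", lambda: print("agents changed"))
    notify("backup_agent_listeners")  # prints once
    unsub()
    notify("backup_agent_listeners")  # prints nothing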
@@ -2,12 +2,10 @@

 from __future__ import annotations

 import mimetypes
 from pathlib import Path

 from google import genai  # type: ignore[attr-defined]
 from google.genai.errors import APIError, ClientError
-from PIL import Image
-from requests.exceptions import Timeout
 import voluptuous as vol

@@ -26,6 +24,7 @@ from homeassistant.exceptions import (
     HomeAssistantError,
 )
 from homeassistant.helpers import config_validation as cv
+from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
 from homeassistant.helpers.typing import ConfigType

 from .const import (

@@ -38,6 +37,7 @@ from .const import (

 SERVICE_GENERATE_CONTENT = "generate_content"
 CONF_IMAGE_FILENAME = "image_filename"
+CONF_FILENAMES = "filenames"

 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
 PLATFORMS = (Platform.CONVERSATION,)

@@ -50,31 +50,43 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:

     async def generate_content(call: ServiceCall) -> ServiceResponse:
         """Generate content from text and optionally images."""

+        if call.data[CONF_IMAGE_FILENAME]:
+            # Deprecated in 2025.3, to remove in 2025.9
+            async_create_issue(
+                hass,
+                DOMAIN,
+                "deprecated_image_filename_parameter",
+                breaks_in_ha_version="2025.9.0",
+                is_fixable=False,
+                severity=IssueSeverity.WARNING,
+                translation_key="deprecated_image_filename_parameter",
+            )
+
         prompt_parts = [call.data[CONF_PROMPT]]

-        def append_images_to_prompt():
-            image_filenames = call.data[CONF_IMAGE_FILENAME]
-            for image_filename in image_filenames:
-                if not hass.config.is_allowed_path(image_filename):
-                    raise HomeAssistantError(
-                        f"Cannot read `{image_filename}`, no access to path; "
-                        "`allowlist_external_dirs` may need to be adjusted in "
-                        "`configuration.yaml`"
-                    )
-                if not Path(image_filename).exists():
-                    raise HomeAssistantError(f"`{image_filename}` does not exist")
-                mime_type, _ = mimetypes.guess_type(image_filename)
-                if mime_type is None or not mime_type.startswith("image"):
-                    raise HomeAssistantError(f"`{image_filename}` is not an image")
-                prompt_parts.append(Image.open(image_filename))
-
-        await hass.async_add_executor_job(append_images_to_prompt)
-
         config_entry: GoogleGenerativeAIConfigEntry = hass.config_entries.async_entries(
             DOMAIN
         )[0]

         client = config_entry.runtime_data

+        def append_files_to_prompt():
+            image_filenames = call.data[CONF_IMAGE_FILENAME]
+            filenames = call.data[CONF_FILENAMES]
+            for filename in set(image_filenames + filenames):
+                if not hass.config.is_allowed_path(filename):
+                    raise HomeAssistantError(
+                        f"Cannot read `{filename}`, no access to path; "
+                        "`allowlist_external_dirs` may need to be adjusted in "
+                        "`configuration.yaml`"
+                    )
+                if not Path(filename).exists():
+                    raise HomeAssistantError(f"`{filename}` does not exist")
+                prompt_parts.append(client.files.upload(file=filename))
+
+        await hass.async_add_executor_job(append_files_to_prompt)
+
         try:
             response = await client.aio.models.generate_content(
                 model=RECOMMENDED_CHAT_MODEL, contents=prompt_parts

@@ -105,6 +117,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
                 vol.Optional(CONF_IMAGE_FILENAME, default=[]): vol.All(
                     cv.ensure_list, [cv.string]
                 ),
+                vol.Optional(CONF_FILENAMES, default=[]): vol.All(
+                    cv.ensure_list, [cv.string]
+                ),
             }
         ),
         supports_response=SupportsResponse.ONLY,
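The service body above keeps all blocking filesystem work (allowlist check, existence check, upload) inside a plain function that is handed to the executor, so the event loop never blocks on disk I/O. A hedged standalone sketch of that guard-then-offload pattern; is_allowed stands in for hass.config.is_allowed_path:

    # Sketch only: validate paths in a sync function, run it off the loop.
    import asyncio
    from pathlib import Path


    def collect_files(filenames: list[str], is_allowed) -> list[Path]:
        """Validate each path before touching the filesystem."""
        parts: list[Path] = []
        for filename in set(filenames):
            if not is_allowed(filename):
                raise PermissionError(f"no access to `{filename}`")
            path = Path(filename)
            if not path.exists():
                raise FileNotFoundError(f"`{filename}` does not exist")
            parts.append(path)
        return parts


    async def main() -> None:
        loop = asyncio.get_running_loop()
        # This module's own file is used so the example runs anywhere.
        files = await loop.run_in_executor(None, collect_files, [__file__], lambda p: True)
        print(files)


    asyncio.run(main())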
@@ -111,9 +111,20 @@ def _format_schema(schema: dict[str, Any]) -> Schema:
             continue
         if key == "any_of":
             val = [_format_schema(subschema) for subschema in val]
-        if key == "type":
+        elif key == "type":
             val = val.upper()
-        if key == "items":
+        elif key == "format":
+            # Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
+            # formats that are not supported are ignored
+            if schema.get("type") == "string" and val not in ("enum", "date-time"):
+                continue
+            if schema.get("type") == "number" and val not in ("float", "double"):
+                continue
+            if schema.get("type") == "integer" and val not in ("int32", "int64"):
+                continue
+            if schema.get("type") not in ("string", "number", "integer"):
+                continue
+        elif key == "items":
             val = _format_schema(val)
         elif key == "properties":
             val = {k: _format_schema(v) for k, v in val.items()}
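The new format branch above drops any "format" value the Gemini schema grammar does not accept for the declared "type". The same rule as a standalone, table-driven sketch; the allowed pairs are taken directly from the hunk above:

    # Sketch of the format-filtering rule: keep a "format" only when the Gemini
    # schema documentation lists it for the given "type".
    _ALLOWED_FORMATS = {
        "string": ("enum", "date-time"),
        "number": ("float", "double"),
        "integer": ("int32", "int64"),
    }


    def keep_format(schema_type: str | None, fmt: str) -> bool:
        """Return True when `fmt` is valid for `schema_type` in a Gemini schema."""
        return fmt in _ALLOWED_FORMATS.get(schema_type or "", ())


    assert keep_format("string", "date-time")
    assert not keep_format("string", "uri")      # unsupported string format dropped
    assert not keep_format("boolean", "int32")   # formats only apply to the three types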
@@ -9,3 +9,8 @@ generate_content:
      required: false
      selector:
        object:
+    filenames:
+      required: false
+      selector:
+        text:
+          multiple: true
@@ -56,10 +56,21 @@
        },
        "image_filename": {
          "name": "Image filename",
-         "description": "Images",
+         "description": "Deprecated. Use filenames instead.",
          "example": "/config/www/image.jpg"
        },
+       "filenames": {
+         "name": "Attachment filenames",
+         "description": "Attachments to add to the prompt (images, PDFs, etc)",
+         "example": "/config/www/image.jpg"
+       }
      }
    }
  },
+ "issues": {
+   "deprecated_image_filename_parameter": {
+     "title": "Deprecated 'image_filename' parameter",
+     "description": "The 'image_filename' parameter in Google Generative AI actions is deprecated. Please edit scripts and automations to use 'filenames' instead."
+   }
+ }
 }
@@ -89,6 +89,10 @@ class GoveeLocalApiCoordinator(DataUpdateCoordinator[list[GoveeDevice]]):
         """Set light color in kelvin."""
         await device.set_temperature(temperature)

+    async def set_scene(self, device: GoveeController, scene: str) -> None:
+        """Set light scene."""
+        await device.set_scene(scene)
+
     @property
     def devices(self) -> list[GoveeDevice]:
         """Return a list of discovered Govee devices."""
@@ -10,9 +10,11 @@ from govee_local_api import GoveeDevice, GoveeLightFeatures
 from homeassistant.components.light import (
     ATTR_BRIGHTNESS,
     ATTR_COLOR_TEMP_KELVIN,
+    ATTR_EFFECT,
     ATTR_RGB_COLOR,
     ColorMode,
     LightEntity,
+    LightEntityFeature,
     filter_supported_color_modes,
 )
 from homeassistant.core import HomeAssistant, callback

@@ -25,6 +27,8 @@ from .coordinator import GoveeLocalApiCoordinator, GoveeLocalConfigEntry

 _LOGGER = logging.getLogger(__name__)

+_NONE_SCENE = "none"
+

 async def async_setup_entry(
     hass: HomeAssistant,

@@ -50,10 +54,22 @@ async def async_setup_entry(
 class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):
     """Govee Light."""

+    _attr_translation_key = "govee_light"
     _attr_has_entity_name = True
     _attr_name = None
     _attr_supported_color_modes: set[ColorMode]
     _fixed_color_mode: ColorMode | None = None
+    _attr_effect_list: list[str] | None = None
+    _attr_effect: str | None = None
+    _attr_supported_features: LightEntityFeature = LightEntityFeature(0)
+    _last_color_state: (
+        tuple[
+            ColorMode | str | None,
+            int | None,
+            tuple[int, int, int] | tuple[int | None] | None,
+        ]
+        | None
+    ) = None

     def __init__(
         self,

@@ -80,6 +96,13 @@ class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):
         if GoveeLightFeatures.BRIGHTNESS & capabilities.features:
             color_modes.add(ColorMode.BRIGHTNESS)

+        if (
+            GoveeLightFeatures.SCENES & capabilities.features
+            and capabilities.scenes
+        ):
+            self._attr_supported_features = LightEntityFeature.EFFECT
+            self._attr_effect_list = [_NONE_SCENE, *capabilities.scenes.keys()]
+
         self._attr_supported_color_modes = filter_supported_color_modes(color_modes)
         if len(self._attr_supported_color_modes) == 1:
             # If the light supports only a single color mode, set it now

@@ -143,12 +166,27 @@ class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):

         if ATTR_RGB_COLOR in kwargs:
             self._attr_color_mode = ColorMode.RGB
+            self._attr_effect = None
+            self._last_color_state = None
             red, green, blue = kwargs[ATTR_RGB_COLOR]
             await self.coordinator.set_rgb_color(self._device, red, green, blue)
         elif ATTR_COLOR_TEMP_KELVIN in kwargs:
             self._attr_color_mode = ColorMode.COLOR_TEMP
+            self._attr_effect = None
+            self._last_color_state = None
             temperature: float = kwargs[ATTR_COLOR_TEMP_KELVIN]
             await self.coordinator.set_temperature(self._device, int(temperature))
+        elif ATTR_EFFECT in kwargs:
+            effect = kwargs[ATTR_EFFECT]
+            if effect and self._attr_effect_list and effect in self._attr_effect_list:
+                if effect == _NONE_SCENE:
+                    self._attr_effect = None
+                    await self._restore_last_color_state()
+                else:
+                    self._attr_effect = effect
+                    self._save_last_color_state()
+                    await self.coordinator.set_scene(self._device, effect)

         self.async_write_ha_state()

     async def async_turn_off(self, **kwargs: Any) -> None:

@@ -159,3 +197,27 @@ class GoveeLight(CoordinatorEntity[GoveeLocalApiCoordinator], LightEntity):
     @callback
     def _update_callback(self, device: GoveeDevice) -> None:
         self.async_write_ha_state()
+
+    def _save_last_color_state(self) -> None:
+        color_mode = self.color_mode
+        self._last_color_state = (
+            color_mode,
+            self.brightness,
+            (self.color_temp_kelvin,)
+            if color_mode == ColorMode.COLOR_TEMP
+            else self.rgb_color,
+        )
+
+    async def _restore_last_color_state(self) -> None:
+        if self._last_color_state:
+            color_mode, brightness, color = self._last_color_state
+            if color:
+                if color_mode == ColorMode.RGB:
+                    await self.coordinator.set_rgb_color(self._device, *color)
+                elif color_mode == ColorMode.COLOR_TEMP:
+                    await self.coordinator.set_temperature(self._device, *color)
+            if brightness:
+                await self.coordinator.set_brightness(
+                    self._device, int((float(brightness) / 255.0) * 100.0)
+                )
+            self._last_color_state = None
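_restore_last_color_state above replays the color tuple saved by _save_last_color_state and rescales brightness, since Home Assistant stores brightness as 0-255 while the coordinator API takes a percentage. A hedged sketch of just that conversion:

    # Sketch of the brightness round-trip in _restore_last_color_state:
    # Home Assistant uses 0-255, the device API expects 0-100 percent.
    def ha_brightness_to_percent(brightness: int) -> int:
        """Convert HA's 0-255 brightness to the 0-100 scale the device expects."""
        return int((float(brightness) / 255.0) * 100.0)


    assert ha_brightness_to_percent(255) == 100
    assert ha_brightness_to_percent(128) == 50
    assert ha_brightness_to_percent(0) == 0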
@@ -9,5 +9,29 @@
       "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
       "no_devices_found": "[%key:common::config_flow::abort::no_devices_found%]"
     }
   },
+  "entity": {
+    "light": {
+      "govee_light": {
+        "state_attributes": {
+          "effect": {
+            "state": {
+              "none": "None",
+              "sunrise": "Sunrise",
+              "sunset": "Sunset",
+              "movie": "Movie",
+              "dating": "Dating",
+              "romantic": "Romantic",
+              "twinkle": "Twinkle",
+              "candlelight": "Candlelight",
+              "snowflake": "Snowflake",
+              "energetic": "Energetic",
+              "breathe": "Breathe",
+              "crossing": "Crossing"
+            }
+          }
+        }
+      }
+    }
+  }
 }
@@ -20,3 +20,4 @@ MAX_ERRORS = 2
 TARGET_TEMPERATURE_STEP = 1

 UPDATE_INTERVAL = 60
+MAX_EXPECTED_RESPONSE_TIME_INTERVAL = UPDATE_INTERVAL * 2
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import copy
 from datetime import datetime, timedelta
 import logging
 from typing import Any

@@ -24,6 +25,7 @@ from .const import (
     DISPATCH_DEVICE_DISCOVERED,
     DOMAIN,
     MAX_ERRORS,
+    MAX_EXPECTED_RESPONSE_TIME_INTERVAL,
     UPDATE_INTERVAL,
 )

@@ -48,7 +50,6 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
             always_update=False,
         )
         self.device = device
         self.device.add_handler(Response.DATA, self.device_state_updated)
-        self.device.add_handler(Response.RESULT, self.device_state_updated)

         self._error_count: int = 0

@@ -88,7 +89,9 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
         # raise update failed if time for more than MAX_ERRORS has passed since last update
         now = utcnow()
         elapsed_success = now - self._last_response_time
-        if self.update_interval and elapsed_success >= self.update_interval:
+        if self.update_interval and elapsed_success >= timedelta(
+            seconds=MAX_EXPECTED_RESPONSE_TIME_INTERVAL
+        ):
             if not self._last_error_time or (
                 (now - self.update_interval) >= self._last_error_time
             ):

@@ -96,16 +99,19 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
                 self._error_count += 1

                 _LOGGER.warning(
-                    "Device %s is unresponsive for %s seconds",
+                    "Device %s took an unusually long time to respond, %s seconds",
                     self.name,
                     elapsed_success,
                 )
+        else:
+            self._error_count = 0
         if self.last_update_success and self._error_count >= MAX_ERRORS:
             raise UpdateFailed(
                 f"Device {self.name} is unresponsive for too long and now unavailable"
             )

-        return self.device.raw_properties
+        self._last_response_time = utcnow()
+        return copy.deepcopy(self.device.raw_properties)

     async def push_state_update(self):
         """Send state updates to the physical device."""
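Two things change in the last hunk: the staleness check now compares against MAX_EXPECTED_RESPONSE_TIME_INTERVAL (twice the poll interval) rather than a single update_interval, and the returned properties are deep-copied, presumably so that later in-place mutation of device.raw_properties cannot alias the coordinator's previous data when always_update=False compares old and new. A hedged sketch of the timing rule:

    # Sketch of the unresponsiveness window: a device is only flagged after it
    # has been silent for two polling intervals, not after one missed poll.
    from datetime import datetime, timedelta, timezone

    UPDATE_INTERVAL = 60
    MAX_EXPECTED_RESPONSE_TIME_INTERVAL = UPDATE_INTERVAL * 2


    def is_overdue(last_response: datetime, now: datetime) -> bool:
        """True when the device exceeded the allowed silent window."""
        return now - last_response >= timedelta(
            seconds=MAX_EXPECTED_RESPONSE_TIME_INTERVAL
        )


    now = datetime.now(timezone.utc)
    assert not is_overdue(now - timedelta(seconds=61), now)   # one missed poll: tolerated
    assert is_overdue(now - timedelta(seconds=121), now)      # two intervals: flagged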
@@ -26,6 +26,7 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
         api_key="todayEnergy",
         native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
         device_class=SensorDeviceClass.ENERGY,
+        state_class=SensorStateClass.TOTAL_INCREASING,
     ),
     GrowattSensorEntityDescription(
         key="total_output_power",

@@ -33,6 +34,7 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
         api_key="invTodayPpv",
         native_unit_of_measurement=UnitOfPower.WATT,
         device_class=SensorDeviceClass.POWER,
+        state_class=SensorStateClass.MEASUREMENT,
     ),
     GrowattSensorEntityDescription(
         key="total_energy_output",
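The two added state_class lines opt these sensors into long-term statistics: TOTAL_INCREASING fits a cumulative daily-energy counter that periodically resets, while MEASUREMENT fits an instantaneous power reading. A hedged sketch of the distinction; the helper function is illustrative, not part of the integration:

    # Sketch only: choosing the statistics behaviour for a reading.
    from homeassistant.components.sensor import SensorStateClass


    def pick_state_class(is_cumulative: bool) -> SensorStateClass:
        """Cumulative counters (e.g. today's kWh) vs. instantaneous values (W)."""
        return (
            SensorStateClass.TOTAL_INCREASING
            if is_cumulative
            else SensorStateClass.MEASUREMENT
        )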
@@ -35,6 +35,15 @@ ATTR_TYPE = "type"
|
||||
ATTR_PRIORITY = "priority"
|
||||
ATTR_TAG = "tag"
|
||||
ATTR_KEYWORD = "keyword"
|
||||
ATTR_REMOVE_TAG = "remove_tag"
|
||||
ATTR_ALIAS = "alias"
|
||||
ATTR_PRIORITY = "priority"
|
||||
ATTR_COST = "cost"
|
||||
ATTR_NOTES = "notes"
|
||||
ATTR_UP_DOWN = "up_down"
|
||||
ATTR_FREQUENCY = "frequency"
|
||||
ATTR_COUNTER_UP = "counter_up"
|
||||
ATTR_COUNTER_DOWN = "counter_down"
|
||||
|
||||
SERVICE_CAST_SKILL = "cast_skill"
|
||||
SERVICE_START_QUEST = "start_quest"
|
||||
@@ -50,6 +59,9 @@ SERVICE_SCORE_REWARD = "score_reward"
|
||||
|
||||
SERVICE_TRANSFORMATION = "transformation"
|
||||
|
||||
SERVICE_UPDATE_REWARD = "update_reward"
|
||||
SERVICE_CREATE_REWARD = "create_reward"
|
||||
SERVICE_UPDATE_HABIT = "update_habit"
|
||||
|
||||
DEVELOPER_ID = "4c4ca53f-c059-4ffa-966e-9d29dd405daf"
|
||||
X_CLIENT = f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}"
|
||||
|
||||
@@ -217,6 +217,26 @@
       "sections": {
         "filter": "mdi:calendar-filter"
       }
     },
+    "update_reward": {
+      "service": "mdi:treasure-chest",
+      "sections": {
+        "tag_options": "mdi:tag",
+        "developer_options": "mdi:test-tube"
+      }
+    },
+    "create_reward": {
+      "service": "mdi:treasure-chest-outline",
+      "sections": {
+        "developer_options": "mdi:test-tube"
+      }
+    },
+    "update_habit": {
+      "service": "mdi:contrast-box",
+      "sections": {
+        "tag_options": "mdi:tag",
+        "developer_options": "mdi:test-tube"
+      }
+    }
   }
 }
@@ -4,15 +4,18 @@ from __future__ import annotations

 from dataclasses import asdict
 import logging
-from typing import TYPE_CHECKING, Any
+from typing import TYPE_CHECKING, Any, cast
 from uuid import UUID

 from aiohttp import ClientError
 from habiticalib import (
     Direction,
+    Frequency,
     HabiticaException,
     NotAuthorizedError,
     NotFoundError,
     Skill,
+    Task,
     TaskData,
+    TaskPriority,
     TaskType,

@@ -20,6 +23,7 @@ from habiticalib import (
 )
 import voluptuous as vol

+from homeassistant.components.todo import ATTR_RENAME
 from homeassistant.config_entries import ConfigEntryState
 from homeassistant.const import ATTR_NAME, CONF_NAME
 from homeassistant.core import (

@@ -34,19 +38,27 @@ from homeassistant.helpers.issue_registry import IssueSeverity, async_create_iss
 from homeassistant.helpers.selector import ConfigEntrySelector

 from .const import (
+    ATTR_ALIAS,
     ATTR_ARGS,
     ATTR_CONFIG_ENTRY,
+    ATTR_COST,
+    ATTR_COUNTER_DOWN,
+    ATTR_COUNTER_UP,
     ATTR_DATA,
     ATTR_DIRECTION,
+    ATTR_FREQUENCY,
     ATTR_ITEM,
     ATTR_KEYWORD,
+    ATTR_NOTES,
     ATTR_PATH,
     ATTR_PRIORITY,
+    ATTR_REMOVE_TAG,
     ATTR_SKILL,
     ATTR_TAG,
     ATTR_TARGET,
     ATTR_TASK,
     ATTR_TYPE,
+    ATTR_UP_DOWN,
     DOMAIN,
     EVENT_API_CALL_SUCCESS,
     SERVICE_ABORT_QUEST,

@@ -54,6 +66,7 @@ from .const import (
     SERVICE_API_CALL,
     SERVICE_CANCEL_QUEST,
     SERVICE_CAST_SKILL,
+    SERVICE_CREATE_REWARD,
     SERVICE_GET_TASKS,
     SERVICE_LEAVE_QUEST,
     SERVICE_REJECT_QUEST,

@@ -61,6 +74,8 @@ from .const import (
     SERVICE_SCORE_REWARD,
     SERVICE_START_QUEST,
     SERVICE_TRANSFORMATION,
+    SERVICE_UPDATE_HABIT,
+    SERVICE_UPDATE_REWARD,
 )
 from .coordinator import HabiticaConfigEntry
@@ -104,6 +119,39 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
     }
 )

+BASE_TASK_SCHEMA = vol.Schema(
+    {
+        vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
+        vol.Optional(ATTR_RENAME): cv.string,
+        vol.Optional(ATTR_NOTES): cv.string,
+        vol.Optional(ATTR_TAG): vol.All(cv.ensure_list, [str]),
+        vol.Optional(ATTR_ALIAS): vol.All(
+            cv.string, cv.matches_regex("^[a-zA-Z0-9-_]*$")
+        ),
+        vol.Optional(ATTR_COST): vol.All(vol.Coerce(float), vol.Range(0)),
+        vol.Optional(ATTR_PRIORITY): vol.All(
+            vol.Upper, vol.In(TaskPriority._member_names_)
+        ),
+        vol.Optional(ATTR_UP_DOWN): vol.All(cv.ensure_list, [str]),
+        vol.Optional(ATTR_COUNTER_UP): vol.All(int, vol.Range(0)),
+        vol.Optional(ATTR_COUNTER_DOWN): vol.All(int, vol.Range(0)),
+        vol.Optional(ATTR_FREQUENCY): vol.Coerce(Frequency),
+    }
+)
+
+SERVICE_UPDATE_TASK_SCHEMA = BASE_TASK_SCHEMA.extend(
+    {
+        vol.Required(ATTR_TASK): cv.string,
+        vol.Optional(ATTR_REMOVE_TAG): vol.All(cv.ensure_list, [str]),
+    }
+)
+
+SERVICE_CREATE_TASK_SCHEMA = BASE_TASK_SCHEMA.extend(
+    {
+        vol.Required(ATTR_NAME): cv.string,
+    }
+)
+
 SERVICE_GET_TASKS_SCHEMA = vol.Schema(
     {
         vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector({"integration": DOMAIN}),

@@ -138,6 +186,12 @@ ITEMID_MAP = {
     "shiny_seed": Skill.SHINY_SEED,
 }

+SERVICE_TASK_TYPE_MAP = {
+    SERVICE_UPDATE_REWARD: TaskType.REWARD,
+    SERVICE_CREATE_REWARD: TaskType.REWARD,
+    SERVICE_UPDATE_HABIT: TaskType.HABIT,
+}
+

 def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
     """Return config entry or raise if not found or not loaded."""
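BASE_TASK_SCHEMA above carries the optional fields shared by all three actions, and each action's schema layers its own required keys on top with extend(). A minimal runnable sketch of that pattern using only voluptuous; the field names are shortened for illustration:

    # Sketch of the shared-base / per-action schema layering.
    import voluptuous as vol

    BASE = vol.Schema(
        {vol.Optional("notes"): str, vol.Optional("cost"): vol.Coerce(float)}
    )
    UPDATE = BASE.extend({vol.Required("task"): str})
    CREATE = BASE.extend({vol.Required("name"): str})

    print(UPDATE({"task": "abc", "cost": "1.5"}))  # {'task': 'abc', 'cost': 1.5}
    print(CREATE({"name": "Coffee"}))              # {'name': 'Coffee'}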
@@ -516,6 +570,169 @@ def async_setup_services(hass: HomeAssistant) -> None:  # noqa: C901

         return result

+    async def create_or_update_task(call: ServiceCall) -> ServiceResponse:  # noqa: C901
+        """Create or update task action."""
+        entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
+        coordinator = entry.runtime_data
+        await coordinator.async_refresh()
+        is_update = call.service in (SERVICE_UPDATE_REWARD, SERVICE_UPDATE_HABIT)
+        current_task = None
+
+        if is_update:
+            try:
+                current_task = next(
+                    task
+                    for task in coordinator.data.tasks
+                    if call.data[ATTR_TASK] in (str(task.id), task.alias, task.text)
+                    and task.Type is SERVICE_TASK_TYPE_MAP[call.service]
+                )
+            except StopIteration as e:
+                raise ServiceValidationError(
+                    translation_domain=DOMAIN,
+                    translation_key="task_not_found",
+                    translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"},
+                ) from e
+
+        data = Task()
+
+        if not is_update:
+            data["type"] = TaskType.REWARD
+
+        if (text := call.data.get(ATTR_RENAME)) or (text := call.data.get(ATTR_NAME)):
+            data["text"] = text
+
+        if (notes := call.data.get(ATTR_NOTES)) is not None:
+            data["notes"] = notes
+
+        tags = cast(list[str], call.data.get(ATTR_TAG))
+        remove_tags = cast(list[str], call.data.get(ATTR_REMOVE_TAG))
+
+        if tags or remove_tags:
+            update_tags = set(current_task.tags) if current_task else set()
+            user_tags = {
+                tag.name.lower(): tag.id
+                for tag in coordinator.data.user.tags
+                if tag.id and tag.name
+            }
+
+            if tags:
+                # Creates new tag if it doesn't exist
+                async def create_tag(tag_name: str) -> UUID:
+                    tag_id = (await coordinator.habitica.create_tag(tag_name)).data.id
+                    if TYPE_CHECKING:
+                        assert tag_id
+                    return tag_id
+
+                try:
+                    update_tags.update(
+                        {
+                            user_tags.get(tag_name.lower())
+                            or (await create_tag(tag_name))
+                            for tag_name in tags
+                        }
+                    )
+                except TooManyRequestsError as e:
+                    raise HomeAssistantError(
+                        translation_domain=DOMAIN,
+                        translation_key="setup_rate_limit_exception",
+                        translation_placeholders={"retry_after": str(e.retry_after)},
+                    ) from e
+                except HabiticaException as e:
+                    raise HomeAssistantError(
+                        translation_domain=DOMAIN,
+                        translation_key="service_call_exception",
+                        translation_placeholders={"reason": str(e.error.message)},
+                    ) from e
+                except ClientError as e:
+                    raise HomeAssistantError(
+                        translation_domain=DOMAIN,
+                        translation_key="service_call_exception",
+                        translation_placeholders={"reason": str(e)},
+                    ) from e
+
+            if remove_tags:
+                update_tags.difference_update(
+                    {
+                        user_tags[tag_name.lower()]
+                        for tag_name in remove_tags
+                        if tag_name.lower() in user_tags
+                    }
+                )
+
+            data["tags"] = list(update_tags)
+
+        if (alias := call.data.get(ATTR_ALIAS)) is not None:
+            data["alias"] = alias
+
+        if (cost := call.data.get(ATTR_COST)) is not None:
+            data["value"] = cost
+
+        if priority := call.data.get(ATTR_PRIORITY):
+            data["priority"] = TaskPriority[priority]
+
+        if frequency := call.data.get(ATTR_FREQUENCY):
+            data["frequency"] = frequency
+
+        if up_down := call.data.get(ATTR_UP_DOWN):
+            data["up"] = "up" in up_down
+            data["down"] = "down" in up_down
+
+        if counter_up := call.data.get(ATTR_COUNTER_UP):
+            data["counterUp"] = counter_up
+
+        if counter_down := call.data.get(ATTR_COUNTER_DOWN):
+            data["counterDown"] = counter_down
+
+        try:
+            if is_update:
+                if TYPE_CHECKING:
+                    assert current_task
+                    assert current_task.id
+                response = await coordinator.habitica.update_task(current_task.id, data)
+            else:
+                response = await coordinator.habitica.create_task(data)
+        except TooManyRequestsError as e:
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="setup_rate_limit_exception",
+                translation_placeholders={"retry_after": str(e.retry_after)},
+            ) from e
+        except HabiticaException as e:
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="service_call_exception",
+                translation_placeholders={"reason": str(e.error.message)},
+            ) from e
+        except ClientError as e:
+            raise HomeAssistantError(
+                translation_domain=DOMAIN,
+                translation_key="service_call_exception",
+                translation_placeholders={"reason": str(e)},
+            ) from e
+        else:
+            return response.data.to_dict(omit_none=True)
+
+    hass.services.async_register(
+        DOMAIN,
+        SERVICE_UPDATE_REWARD,
+        create_or_update_task,
+        schema=SERVICE_UPDATE_TASK_SCHEMA,
+        supports_response=SupportsResponse.ONLY,
+    )
+    hass.services.async_register(
+        DOMAIN,
+        SERVICE_UPDATE_HABIT,
+        create_or_update_task,
+        schema=SERVICE_UPDATE_TASK_SCHEMA,
+        supports_response=SupportsResponse.ONLY,
+    )
+    hass.services.async_register(
+        DOMAIN,
+        SERVICE_CREATE_REWARD,
+        create_or_update_task,
+        schema=SERVICE_CREATE_TASK_SCHEMA,
+        supports_response=SupportsResponse.ONLY,
+    )
     hass.services.async_register(
         DOMAIN,
         SERVICE_API_CALL,
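A hedged usage sketch of the newly registered actions; the config_entry id and task name are placeholders, and the call shape follows SERVICE_UPDATE_TASK_SCHEMA defined above. With supports_response=ONLY, the call must pass blocking=True and return_response=True:

    # Hypothetical automation-side call; "<entry_id>" must be replaced with a
    # loaded Habitica config entry id.
    async def example(hass) -> None:
        """Call the new update_reward action and print the returned task."""
        response = await hass.services.async_call(
            "habitica",
            "update_reward",
            {
                "config_entry": "<entry_id>",
                "task": "Coffee break",  # matched against task id, alias, or text
                "cost": 15,
                "tag": ["treats"],
            },
            blocking=True,
            return_response=True,
        )
        print(response)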
@@ -140,3 +140,115 @@ get_tasks:
      required: false
      selector:
        text:
+update_reward:
+  fields:
+    config_entry: *config_entry
+    task: *task
+    rename: &rename
+      selector:
+        text:
+    notes: &notes
+      required: false
+      selector:
+        text:
+          multiline: true
+    cost:
+      required: false
+      selector: &cost_selector
+        number:
+          min: 0
+          step: 0.01
+          unit_of_measurement: "🪙"
+          mode: box
+    tag_options: &tag_options
+      collapsed: true
+      fields:
+        tag: &tag
+          required: false
+          selector:
+            text:
+              multiple: true
+        remove_tag:
+          required: false
+          selector:
+            text:
+              multiple: true
+    developer_options: &developer_options
+      collapsed: true
+      fields:
+        alias: &alias
+          required: false
+          selector:
+            text:
+create_reward:
+  fields:
+    config_entry: *config_entry
+    name:
+      required: true
+      selector:
+        text:
+    notes: *notes
+    cost:
+      required: true
+      selector: *cost_selector
+    tag: *tag
+    developer_options: *developer_options
+update_habit:
+  fields:
+    config_entry: *config_entry
+    task: *task
+    rename: *rename
+    notes: *notes
+    up_down:
+      required: false
+      selector:
+        select:
+          options:
+            - value: up
+              label: "➕"
+            - value: down
+              label: "➖"
+          multiple: true
+          mode: list
+    priority:
+      required: false
+      selector:
+        select:
+          options:
+            - "trivial"
+            - "easy"
+            - "medium"
+            - "hard"
+          mode: dropdown
+          translation_key: "priority"
+    frequency:
+      required: false
+      selector:
+        select:
+          options:
+            - "daily"
+            - "weekly"
+            - "monthly"
+          translation_key: "frequency"
+          mode: dropdown
+    tag_options: *tag_options
+    developer_options:
+      collapsed: true
+      fields:
+        counter_up:
+          required: false
+          selector:
+            number:
+              min: 0
+              step: 1
+              unit_of_measurement: "➕"
+              mode: box
+        counter_down:
+          required: false
+          selector:
+            number:
+              min: 0
+              step: 1
+              unit_of_measurement: "➖"
+              mode: box
+        alias: *alias
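The &anchor/*alias pairs in the YAML above (rename, notes, cost_selector, tag_options, developer_options) are standard YAML references, so shared field definitions are written once and reused across the three actions. A hedged illustration with PyYAML:

    # Illustration only: an alias resolves to the very same parsed object as
    # its anchor, which is why the shared selectors above stay in sync.
    import yaml

    doc = """
    update_reward:
      notes: &notes
        required: false
    create_reward:
      notes: *notes
    """
    data = yaml.safe_load(doc)
    assert data["update_reward"]["notes"] is data["create_reward"]["notes"]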
Some files were not shown because too many files have changed in this diff.