mirror of https://github.com/home-assistant/core.git
synced 2025-11-10 19:40:11 +00:00

Compare commits
copilot/fi...llm-python
485 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 176f9c9f94 | |
| | 9f17a8a943 | |
| | b44c47cd80 | |
| | e80c090932 | |
| | ff418f513a | |
| | b222cc5889 | |
| | db1707fd72 | |
| | 6f6f5809d0 | |
| | 1f43f82ea6 | |
| | 942274234e | |
| | f03955b773 | |
| | 27ac375183 | |
| | c951728767 | |
| | 3496494290 | |
| | e90183391e | |
| | 90558c517b | |
| | 7fba94747e | |
| | 3b4b478afa | |
| | a3640c5664 | |
| | 246a181ad4 | |
| | d642ecb302 | |
| | 53889165b5 | |
| | fe32e74910 | |
| | a71ae4db37 | |
| | 0d5ebdb692 | |
| | 80e720f663 | |
| | 616b031df8 | |
| | bcdece4455 | |
| | 1aa3efaf8a | |
| | 7f16b11776 | |
| | 078b7224fc | |
| | d5970e7733 | |
| | d5a74892e6 | |
| | 793a829236 | |
| | 7670146faf | |
| | eaedefe105 | |
| | 4f20776e0e | |
| | 6c21a14be4 | |
| | 9015743483 | |
| | 2a62e033dd | |
| | f72f2a326a | |
| | 61de50dfc0 | |
| | ef7ed026db | |
| | abdb48e7ce | |
| | 9646aa232a | |
| | 635cfe7d17 | |
| | 1e2f7cadc7 | |
| | 94e9f32da5 | |
| | b7ba99ed17 | |
| | ebbeef8021 | |
| | 8da75490c0 | |
| | bc89e8fd3c | |
| | 602497904b | |
| | facf217b99 | |
| | b300654e15 | |
| | a742125f13 | |
| | 64768b1036 | |
| | 792bb5781d | |
| | 7bd126dc8e | |
| | 83ee380b17 | |
| | 58f8b3c401 | |
| | 2a6d1180f4 | |
| | 00b765893d | |
| | 3e9e9b0489 | |
| | 25f7c02498 | |
| | a785f3d509 | |
| | 9f36b2dcde | |
| | 57265ac648 | |
| | f5fe53a67f | |
| | 7e6ceee9d1 | |
| | 9c21965a34 | |
| | 1ea740d81c | |
| | 6e98446523 | |
| | 2248584a0f | |
| | d9b6f82639 | |
| | 3eecfa8e57 | |
| | 382e7dfd39 | |
| | 5358c89bfd | |
| | e6103fdcf4 | |
| | 02dca5f0ad | |
| | cc4b9e0eca | |
| | 7e28e3dcd3 | |
| | bb3d571887 | |
| | 5a789cbbc8 | |
| | 4954c2a84b | |
| | f28e9f60ee | |
| | 6a4bf4ec72 | |
| | 12706178c2 | |
| | ed39b18d94 | |
| | 9999807891 | |
| | b5db0e98b4 | |
| | f58b2177a2 | |
| | 4f64014816 | |
| | cf68214c4d | |
| | b3d3284f5c | |
| | 12c346f550 | |
| | bda82e19a5 | |
| | f7726a7563 | |
| | 2c0ed2cbfe | |
| | 13376ef896 | |
| | d18cc3d6c3 | |
| | b40aab479a | |
| | 721f9a40d8 | |
| | eb4b75a9a7 | |
| | b40f381164 | |
| | 51413b7a8d | |
| | ff694a0058 | |
| | eea04558a9 | |
| | 5ad2a27918 | |
| | f39305f64e | |
| | 7fba0ca2c0 | |
| | 51fbccd125 | |
| | 5fc2e6ed53 | |
| | 5a7f7d90a0 | |
| | 6d34d34ce1 | |
| | 6454f40c3c | |
| | 53e40a6b8c | |
| | 8a54a1d95c | |
| | 8a52e9ca01 | |
| | d9ca253c6c | |
| | b7853ea9bd | |
| | d19e410ea8 | |
| | 83f911e4ff | |
| | 452322e971 | |
| | 6fa7c6cb81 | |
| | ed6072d46b | |
| | 9fdc632780 | |
| | 4d426c31f9 | |
| | ea946c90b3 | |
| | fb68b2d454 | |
| | 2ebe0a929e | |
| | c1e5a7efc9 | |
| | 561ef7015c | |
| | b4270e019e | |
| | 614bf96fb9 | |
| | ca290ee631 | |
| | ad3174f6e6 | |
| | 218b0738ca | |
| | 98e6e20079 | |
| | 89aa349881 | |
| | 07930b12d0 | |
| | 711afa306c | |
| | a3904ce60c | |
| | 455cf2fb42 | |
| | 072ae2b955 | |
| | 2b70639b11 | |
| | 2612dbeb9b | |
| | 7ebdd24224 | |
| | 66ff1cf005 | |
| | 08aae4bf49 | |
| | 313b5a483c | |
| | 8edbcc92d3 | |
| | 067cab71fa | |
| | 596e4883b1 | |
| | fb4a452872 | |
| | 5b232226e9 | |
| | db81610983 | |
| | 8f5c8caf07 | |
| | f6af524ddf | |
| | e0a8c9b458 | |
| | c46412ee5b | |
| | a06df2a680 | |
| | 68fbcc8665 | |
| | 6cde5cfdcc | |
| | 5605f5896a | |
| | 93c30f1b59 | |
| | 6e3ccbefc2 | |
| | 715dc12792 | |
| | 9cae0e0acc | |
| | e13702d9b1 | |
| | 3b358df9e7 | |
| | e394435d7c | |
| | 9e398ffc10 | |
| | 065a53a90d | |
| | 91f6b8e1fe | |
| | 1a9d1a9649 | |
| | cb7c7767b5 | |
| | d02029143c | |
| | 3eda687d30 | |
| | 7688c367cc | |
| | a1dc3f3eac | |
| | d135d08813 | |
| | 9595759fd1 | |
| | d54f979612 | |
| | 531073acc0 | |
| | 73cbc962f9 | |
| | 34b0b71375 | |
| | 203c908730 | |
| | 23e6148d3b | |
| | 2a5a66f9d5 | |
| | 84ce5d65e1 | |
| | 00c7838587 | |
| | d8b576c087 | |
| | 330dce24c5 | |
| | 0089d3efa1 | |
| | 167e9c8f4a | |
| | c7f5e25d41 | |
| | 7b5dd4a0ec | |
| | 84de6aacfc | |
| | 9561c84920 | |
| | 7572b2a669 | |
| | b48409ab1b | |
| | ab04e2c501 | |
| | 38e6a7c6d4 | |
| | c2b284de2d | |
| | b760bf342a | |
| | 79cfea3fea | |
| | 69ace08c01 | |
| | bf33e286d6 | |
| | 6b83effc5f | |
| | 2b158fe690 | |
| | 712ddc03c8 | |
| | efe519faad | |
| | 1b7cb418eb | |
| | c678bcd4f1 | |
| | 0eaea13e8d | |
| | b1e4513f7d | |
| | 6d7f8bb7d7 | |
| | b481aaba77 | |
| | d539f37aa4 | |
| | 865b3a6646 | |
| | 1c603f968f | |
| | d821d27730 | |
| | dfa060a7e1 | |
| | 5262cca8e6 | |
| | 2c36a74da5 | |
| | 084cde6ecf | |
| | 3e34aa5fb7 | |
| | 268f0d9e03 | |
| | f8d3bc1b89 | |
| | fb64ff1d17 | |
| | ff72faf83a | |
| | acb58c41eb | |
| | 586b197fc3 | |
| | 5c1d16d582 | |
| | 73be4625ae | |
| | 775701133d | |
| | 1af0282091 | |
| | c876bed33f | |
| | e9d39a826e | |
| | f9e1c07c04 | |
| | c0bef51563 | |
| | b41a9575af | |
| | e585b3abd1 | |
| | 5d2877f454 | |
| | 2d89c60ac5 | |
| | 860a7b7d91 | |
| | 5585376b40 | |
| | c4cb70fc06 | |
| | 981ae39182 | |
| | dff4f79925 | |
| | bf64e11960 | |
| | 823d20c67f | |
| | 1a654cd35d | |
| | 13e592edaf | |
| | 94191239c6 | |
| | 91a1ca09f7 | |
| | 9f1fe8a067 | |
| | f2c9cdb09e | |
| | 712115cdb8 | |
| | eb6ae9d2d6 | |
| | b126f3fa66 | |
| | 2d720f0d32 | |
| | c0155f5e80 | |
| | 23a2d69984 | |
| | a8779d5f52 | |
| | 01c197e830 | |
| | ef4f476844 | |
| | 8aee05b8b0 | |
| | 0f3f8d5707 | |
| | 2948b1c58e | |
| | 4cb2af4d08 | |
| | 8e12d2028d | |
| | 5b046def8e | |
| | 6a81bf6f5e | |
| | 102d6a37c0 | |
| | fd6aba3022 | |
| | a88eadf863 | |
| | 52f0d04c38 | |
| | 3ab80c6ff2 | |
| | 71485871c8 | |
| | ba0da4c2a3 | |
| | cbaadebac3 | |
| | fd0ae32058 | |
| | 382bf78ee0 | |
| | 6aa077a48d | |
| | b638fcbaad | |
| | 704edac9fd | |
| | ff9e2a8f1e | |
| | d778afe61a | |
| | 448084e2b5 | |
| | d99379ffdf | |
| | b835b7f266 | |
| | e96e97edca | |
| | df7c657d7e | |
| | 4f5502ab47 | |
| | c30ee776e9 | |
| | efebdc0181 | |
| | da7fc88f1f | |
| | 566aeb5e9a | |
| | d17f0ef55a | |
| | 35025c4b59 | |
| | e5d512d5e5 | |
| | 2b5028bfb7 | |
| | 757fee9f73 | |
| | 06130219b4 | |
| | 4e2fe63182 | |
| | d0cc9990dd | |
| | 76ca9ce3a4 | |
| | 124e7cf4c8 | |
| | 260ea9a3be | |
| | e1f6820cb6 | |
| | 2215777cfb | |
| | fa3ce62ae8 | |
| | 33421bddf3 | |
| | 1efe2b437d | |
| | a54f0adf74 | |
| | afe574f74e | |
| | 25aae8944d | |
| | f26e6ad211 | |
| | e9444a2e4d | |
| | 60988534a9 | |
| | 932bf81ac8 | |
| | 1302b6744e | |
| | 0aeff366bd | |
| | 0db23b0da6 | |
| | 863e2074b6 | |
| | 13828f6713 | |
| | fdb38ec8ec | |
| | 55abb6e594 | |
| | a83e4f5c63 | |
| | cba15ee439 | |
| | 400620399a | |
| | 28e19215ad | |
| | 119d0a0170 | |
| | 69faf38e86 | |
| | d0ef1a1a8b | |
| | 8f328810bf | |
| | 4f1b75e3b4 | |
| | 445a7fc749 | |
| | 977c0797aa | |
| | a24f027923 | |
| | 7b45798e30 | |
| | 2b0cda0ad1 | |
| | 12dca4b1bf | |
| | 8c509b11b2 | |
| | 991c9008bd | |
| | fe95f6e1c5 | |
| | 37510aa316 | |
| | 4e40e9bf74 | |
| | 70c9b1f095 | |
| | f714388130 | |
| | ffb2a693f4 | |
| | 9d8e253ad3 | |
| | 31631cc882 | |
| | 3a64357201 | |
| | 20fdec9e9c | |
| | 064a63fe1f | |
| | 803654223a | |
| | a6148b50cf | |
| | 02a3c5be14 | |
| | 08ea640629 | |
| | 7dd761c9c3 | |
| | 6b827dfc33 | |
| | 67c19087dd | |
| | 55c7c2f730 | |
| | afee936c3d | |
| | ed2ced6c36 | |
| | 4c5cf028d7 | |
| | 68faa897ad | |
| | 53c9c42148 | |
| | d48cc03be7 | |
| | 28236aa023 | |
| | bfae07135a | |
| | 99d580e371 | |
| | 4d53450cbf | |
| | 1fbce01e26 | |
| | a9621ac811 | |
| | 94f2118b19 | |
| | 73ca6b4900 | |
| | 31e647b5b0 | |
| | fac5b2c09c | |
| | ae48179e95 | |
| | 88c9d5dbe3 | |
| | b76f47cd9f | |
| | 822e1ffc8d | |
| | 1632e0aef6 | |
| | e2bc73f153 | |
| | 46cfdddc80 | |
| | 0bdf6757c4 | |
| | 312e590360 | |
| | 7a6aaf667b | |
| | 33eaca24d6 | |
| | 3d27d501b1 | |
| | 39b651e075 | |
| | a962777a2e | |
| | 594ce8f266 | |
| | 9f867f268c | |
| | 9edd242734 | |
| | 93e11aa8bc | |
| | c2b298283e | |
| | 106c086e8b | |
| | cbf4130bff | |
| | afffe0b08b | |
| | c1ccfee7cc | |
| | 8d8383e1c1 | |
| | f350a1a1fa | |
| | fe2bd8d09e | |
| | cf14226b02 | |
| | bd3fe1d4ad | |
| | 377ca04be8 | |
| | 5837f55205 | |
| | 0766edb9c4 | |
| | e62e3778f3 | |
| | aa8e4c1c15 | |
| | 46ed8a73fc | |
| | 83f22497ae | |
| | 3dda1685dc | |
| | 6fa9d42401 | |
| | 1a54d566f8 | |
| | 1a9cae0f89 | |
| | 551dcaa169 | |
| | 5467db065b | |
| | 6a8d752e56 | |
| | 179a56628d | |
| | b3f830773a | |
| | 084e06ec7d | |
| | e0190afd3c | |
| | b9e16d54c4 | |
| | 627785edc1 | |
| | 4318e29ce8 | |
| | fea5c63bba | |
| | b2349ac2bd | |
| | 08f7b708a4 | |
| | 1236801b7d | |
| | 72d9dbf39d | |
| | 755864f9f3 | |
| | fa476d4e34 | |
| | 018197e41a | |
| | 7dd2b9e422 | |
| | 3e615fd373 | |
| | c0bf167e10 | |
| | 45f6778ff4 | |
| | bddd4d621a | |
| | b0e75e9ee4 | |
| | d45c03a795 | |
| | 8562c8d32f | |
| | ae42d71123 | |
| | 9616c8cd7b | |
| | 9394546668 | |
| | d43f21c2e2 | |
| | 8d68fee9f8 | |
| | b4a4e218ec | |
| | fb2d62d692 | |
| | f538807d6e | |
| | a08c3c9f44 | |
| | 506431c75f | |
| | 37579440e6 | |
| | 5ce2729dc2 | |
| | b5e4ae4a53 | |
| | 3d4386ea6d | |
| | 9f1cec893e | |
| | bc87140a6f | |
| | d77a3fca83 | |
| | 924a86dfb6 | |
| | 0d7608f7c5 | |
| | 22e054f4cd | |
| | 8b53b26333 | |
| | 4d59e8cd80 | |
| | 61396d92a5 | |
| | c72c600de4 | |
| | b86b0c10bd | |
| | eb222f6c5d | |
| | 4b5fe424ed | |
| | 61ca42e923 | |
| | 21c1427abf | |
| | aa6b37bc7c | |
| | bbc1466cfc | |
| | 21a9799060 | |
| | f7d54b46ec | |
| | 6ad1b8dcb1 | |
| | 5f6b1212a3 | |
| | 58dc6a952e | |
| | 59d8df142d | |
| | 04fb86b4ba | |
28 .github/workflows/builder.yml vendored

@@ -27,7 +27,7 @@ jobs:
 publish: ${{ steps.version.outputs.publish }}
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 with:
 fetch-depth: 0

@@ -90,7 +90,7 @@ jobs:
 arch: ${{ fromJson(needs.init.outputs.architectures) }}
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Download nightly wheels of frontend
 if: needs.init.outputs.channel == 'dev'

@@ -175,7 +175,7 @@ jobs:
 sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt
 - name: Download translations
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 name: translations

@@ -190,7 +190,7 @@ jobs:
 echo "${{ github.sha }};${{ github.ref }};${{ github.event_name }};${{ github.actor }}" > rootfs/OFFICIAL_IMAGE
 - name: Login to GitHub Container Registry
-  uses: docker/login-action@v3.4.0
+  uses: docker/login-action@v3.5.0
 with:
 registry: ghcr.io
 username: ${{ github.repository_owner }}

@@ -242,7 +242,7 @@ jobs:
 - green
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set build additional args
 run: |

@@ -256,7 +256,7 @@ jobs:
 fi
 - name: Login to GitHub Container Registry
-  uses: docker/login-action@v3.4.0
+  uses: docker/login-action@v3.5.0
 with:
 registry: ghcr.io
 username: ${{ github.repository_owner }}

@@ -279,7 +279,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Initialize git
 uses: home-assistant/actions/helpers/git-init@master

@@ -321,7 +321,7 @@ jobs:
 registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Install Cosign
 uses: sigstore/cosign-installer@v3.9.2

@@ -330,14 +330,14 @@ jobs:
 - name: Login to DockerHub
 if: matrix.registry == 'docker.io/homeassistant'
-  uses: docker/login-action@v3.4.0
+  uses: docker/login-action@v3.5.0
 with:
 username: ${{ secrets.DOCKERHUB_USERNAME }}
 password: ${{ secrets.DOCKERHUB_TOKEN }}
 - name: Login to GitHub Container Registry
 if: matrix.registry == 'ghcr.io/home-assistant'
-  uses: docker/login-action@v3.4.0
+  uses: docker/login-action@v3.5.0
 with:
 registry: ghcr.io
 username: ${{ github.repository_owner }}

@@ -454,7 +454,7 @@ jobs:
 if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 uses: actions/setup-python@v5.6.0

@@ -462,7 +462,7 @@ jobs:
 python-version: ${{ env.DEFAULT_PYTHON }}
 - name: Download translations
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 name: translations

@@ -499,10 +499,10 @@ jobs:
 HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
 steps:
 - name: Checkout repository
-  uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+  uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
 - name: Login to GitHub Container Registry
-  uses: docker/login-action@74a5d142397b4f367a81961eba4e8cd7edddf772 # v3.4.0
+  uses: docker/login-action@184bdaa0721073962dff0199f1fb9940f07167d1 # v3.5.0
 with:
 registry: ghcr.io
 username: ${{ github.repository_owner }}
96 .github/workflows/ci.yaml vendored

@@ -37,7 +37,7 @@ on:
 type: boolean

 env:
-  CACHE_VERSION: 4
+  CACHE_VERSION: 5
 UV_CACHE_VERSION: 1
 MYPY_CACHE_VERSION: 1
 HA_SHORT_VERSION: "2025.9"

@@ -94,7 +94,7 @@ jobs:
 runs-on: ubuntu-24.04
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Generate partial Python venv restore key
 id: generate_python_cache_key
 run: |

@@ -246,7 +246,7 @@ jobs:
 - info
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -255,7 +255,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-  uses: actions/cache@v4.2.3
+  uses: actions/cache@v4.2.4
 with:
 path: venv
 key: >-

@@ -271,7 +271,7 @@ jobs:
 uv pip install "$(cat requirements_test.txt | grep pre-commit)"
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-  uses: actions/cache@v4.2.3
+  uses: actions/cache@v4.2.4
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 lookup-only: true

@@ -292,7 +292,7 @@ jobs:
 - pre-commit
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 uses: actions/setup-python@v5.6.0
 id: python

@@ -301,7 +301,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -310,7 +310,7 @@ jobs:
 needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 fail-on-cache-miss: true

@@ -332,7 +332,7 @@ jobs:
 - pre-commit
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 uses: actions/setup-python@v5.6.0
 id: python

@@ -341,7 +341,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -350,7 +350,7 @@ jobs:
 needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 fail-on-cache-miss: true

@@ -372,7 +372,7 @@ jobs:
 - pre-commit
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 uses: actions/setup-python@v5.6.0
 id: python

@@ -381,7 +381,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -390,7 +390,7 @@ jobs:
 needs.info.outputs.pre-commit_cache_key }}
 - name: Restore pre-commit environment from cache
 id: cache-precommit
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: ${{ env.PRE_COMMIT_CACHE }}
 fail-on-cache-miss: true

@@ -462,7 +462,7 @@ jobs:
 - script/hassfest/docker/Dockerfile
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Register hadolint problem matcher
 run: |
 echo "::add-matcher::.github/workflows/matchers/hadolint.json"

@@ -481,7 +481,7 @@ jobs:
 python-version: ${{ fromJSON(needs.info.outputs.python_versions) }}
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ matrix.python-version }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -497,7 +497,7 @@ jobs:
 env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
 - name: Restore base Python virtual environment
 id: cache-venv
-  uses: actions/cache@v4.2.3
+  uses: actions/cache@v4.2.4
 with:
 path: venv
 key: >-

@@ -505,7 +505,7 @@ jobs:
 needs.info.outputs.python_cache_key }}
 - name: Restore uv wheel cache
 if: steps.cache-venv.outputs.cache-hit != 'true'
-  uses: actions/cache@v4.2.3
+  uses: actions/cache@v4.2.4
 with:
 path: ${{ env.UV_CACHE_DIR }}
 key: >-

@@ -584,7 +584,7 @@ jobs:
 sudo apt-get -y install \
 libturbojpeg
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -593,7 +593,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -617,7 +617,7 @@ jobs:
 - base
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -626,7 +626,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -651,7 +651,7 @@ jobs:
 && github.event_name == 'pull_request'
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Dependency review
 uses: actions/dependency-review-action@v4.7.1
 with:

@@ -674,7 +674,7 @@ jobs:
 python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ matrix.python-version }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -683,7 +683,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -717,7 +717,7 @@ jobs:
 - base
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -726,7 +726,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -764,7 +764,7 @@ jobs:
 - base
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -773,7 +773,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -809,7 +809,7 @@ jobs:
 - base
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -825,7 +825,7 @@ jobs:
 env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
 - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -833,7 +833,7 @@ jobs:
 ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
 needs.info.outputs.python_cache_key }}
 - name: Restore mypy cache
-  uses: actions/cache@v4.2.3
+  uses: actions/cache@v4.2.4
 with:
 path: .mypy_cache
 key: >-

@@ -886,7 +886,7 @@ jobs:
 libturbojpeg \
 libgammu-dev
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -895,7 +895,7 @@ jobs:
 check-latest: true
 - name: Restore base Python virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -947,7 +947,7 @@ jobs:
 libgammu-dev \
 libxml2-utils
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ matrix.python-version }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -956,7 +956,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -970,7 +970,7 @@ jobs:
 run: |
 echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
 - name: Download pytest_buckets
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 name: pytest_buckets
 - name: Compile English translations

@@ -1080,7 +1080,7 @@ jobs:
 libmariadb-dev-compat \
 libxml2-utils
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ matrix.python-version }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -1089,7 +1089,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -1222,7 +1222,7 @@ jobs:
 sudo apt-get -y install \
 postgresql-server-dev-14
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ matrix.python-version }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -1231,7 +1231,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -1334,9 +1334,9 @@ jobs:
 timeout-minutes: 10
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Download all coverage artifacts
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 pattern: coverage-*
 - name: Upload coverage to Codecov

@@ -1381,7 +1381,7 @@ jobs:
 libgammu-dev \
 libxml2-utils
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ matrix.python-version }}
 id: python
 uses: actions/setup-python@v5.6.0

@@ -1390,7 +1390,7 @@ jobs:
 check-latest: true
 - name: Restore full Python ${{ matrix.python-version }} virtual environment
 id: cache-venv
-  uses: actions/cache/restore@v4.2.3
+  uses: actions/cache/restore@v4.2.4
 with:
 path: venv
 fail-on-cache-miss: true

@@ -1484,9 +1484,9 @@ jobs:
 timeout-minutes: 10
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Download all coverage artifacts
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 pattern: coverage-*
 - name: Upload coverage to Codecov

@@ -1511,7 +1511,7 @@ jobs:
 timeout-minutes: 10
 steps:
 - name: Download all coverage artifacts
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 pattern: test-results-*
 - name: Upload test results to Codecov
6 .github/workflows/codeql.yml vendored

@@ -21,14 +21,14 @@ jobs:
 steps:
 - name: Check out code from GitHub
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Initialize CodeQL
-  uses: github/codeql-action/init@v3.29.5
+  uses: github/codeql-action/init@v3.29.9
 with:
 languages: python
 - name: Perform CodeQL Analysis
-  uses: github/codeql-action/analyze@v3.29.5
+  uses: github/codeql-action/analyze@v3.29.9
 with:
 category: "/language:python"
@@ -231,7 +231,7 @@ jobs:
 - name: Detect duplicates using AI
 id: ai_detection
 if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-  uses: actions/ai-inference@v1.2.3
+  uses: actions/ai-inference@v2.0.0
 with:
 model: openai/gpt-4o
 system-prompt: |

@@ -57,7 +57,7 @@ jobs:
 - name: Detect language using AI
 id: ai_language_detection
 if: steps.detect_language.outputs.should_continue == 'true'
-  uses: actions/ai-inference@v1.2.3
+  uses: actions/ai-inference@v2.0.0
 with:
 model: openai/gpt-4o-mini
 system-prompt: |
2 .github/workflows/restrict-task-creation.yml vendored

@@ -9,7 +9,7 @@ jobs:
 check-authorization:
 runs-on: ubuntu-latest
 # Only run if this is a Task issue type (from the issue form)
-  if: github.event.issue.issue_type == 'Task'
+  if: github.event.issue.type.name == 'Task'
 steps:
 - name: Check if user is authorized
 uses: actions/github-script@v7
2 .github/workflows/translations.yml vendored

@@ -19,7 +19,7 @@ jobs:
 runs-on: ubuntu-latest
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 uses: actions/setup-python@v5.6.0
24 .github/workflows/wheels.yml vendored

@@ -32,7 +32,7 @@ jobs:
 architectures: ${{ steps.info.outputs.architectures }}
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Set up Python ${{ env.DEFAULT_PYTHON }}
 id: python

@@ -135,20 +135,20 @@ jobs:
 arch: ${{ fromJson(needs.init.outputs.architectures) }}
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Download env_file
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 name: env_file
 - name: Download build_constraints
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 name: build_constraints
 - name: Download requirements_diff
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 name: requirements_diff

@@ -159,7 +159,7 @@ jobs:
 sed -i "/uv/d" requirements_diff.txt
 - name: Build wheels
-  uses: home-assistant/wheels@2025.03.0
+  uses: home-assistant/wheels@2025.07.0
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2

@@ -184,25 +184,25 @@ jobs:
 arch: ${{ fromJson(needs.init.outputs.architectures) }}
 steps:
 - name: Checkout the repository
-  uses: actions/checkout@v4.2.2
+  uses: actions/checkout@v5.0.0
 - name: Download env_file
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 name: env_file
 - name: Download build_constraints
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 name: build_constraints
 - name: Download requirements_diff
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 name: requirements_diff
 - name: Download requirements_all_wheels
-  uses: actions/download-artifact@v4.3.0
+  uses: actions/download-artifact@v5.0.0
 with:
 name: requirements_all_wheels

@@ -219,7 +219,7 @@ jobs:
 sed -i "/uv/d" requirements_diff.txt
 - name: Build wheels
-  uses: home-assistant/wheels@2025.03.0
+  uses: home-assistant/wheels@2025.07.0
 with:
 abi: ${{ matrix.abi }}
 tag: musllinux_1_2
@@ -18,7 +18,7 @@ repos:
 exclude_types: [csv, json, html]
 exclude: ^tests/fixtures/|homeassistant/generated/|tests/components/.*/snapshots/
 - repo: https://github.com/pre-commit/pre-commit-hooks
-  rev: v5.0.0
+  rev: v6.0.0
 hooks:
 - id: check-executables-have-shebangs
 stages: [manual]
@@ -310,7 +310,6 @@ homeassistant.components.letpot.*
 homeassistant.components.lidarr.*
 homeassistant.components.lifx.*
 homeassistant.components.light.*
-homeassistant.components.linear_garage_door.*
 homeassistant.components.linkplay.*
 homeassistant.components.litejet.*
 homeassistant.components.litterrobot.*

@@ -467,6 +466,7 @@ homeassistant.components.simplisafe.*
 homeassistant.components.siren.*
 homeassistant.components.skybell.*
 homeassistant.components.slack.*
+homeassistant.components.sleep_as_android.*
 homeassistant.components.sleepiq.*
 homeassistant.components.smhi.*
 homeassistant.components.smlight.*
16 CODEOWNERS generated

@@ -156,8 +156,8 @@ build.json @home-assistant/supervisor
 /tests/components/assist_pipeline/ @balloob @synesthesiam
 /homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
 /tests/components/assist_satellite/ @home-assistant/core @synesthesiam
-/homeassistant/components/asuswrt/ @kennedyshead @ollo69
-/tests/components/asuswrt/ @kennedyshead @ollo69
+/homeassistant/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
+/tests/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
 /homeassistant/components/atag/ @MatsNL
 /tests/components/atag/ @MatsNL
 /homeassistant/components/aten_pe/ @mtdcr

@@ -438,8 +438,8 @@ build.json @home-assistant/supervisor
 /tests/components/enigma2/ @autinerd
 /homeassistant/components/enocean/ @bdurrer
 /tests/components/enocean/ @bdurrer
-/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
-/tests/components/enphase_envoy/ @bdraco @cgarwood @joostlek @catsmanac
+/homeassistant/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
+/tests/components/enphase_envoy/ @bdraco @cgarwood @catsmanac
 /homeassistant/components/entur_public_transport/ @hfurubotten
 /homeassistant/components/environment_canada/ @gwww @michaeldavie
 /tests/components/environment_canada/ @gwww @michaeldavie

@@ -862,8 +862,6 @@ build.json @home-assistant/supervisor
 /tests/components/lifx/ @Djelibeybi
 /homeassistant/components/light/ @home-assistant/core
 /tests/components/light/ @home-assistant/core
-/homeassistant/components/linear_garage_door/ @IceBotYT
-/tests/components/linear_garage_door/ @IceBotYT
 /homeassistant/components/linkplay/ @Velleman
 /tests/components/linkplay/ @Velleman
 /homeassistant/components/linux_battery/ @fabaff

@@ -1417,6 +1415,8 @@ build.json @home-assistant/supervisor
 /tests/components/skybell/ @tkdrob
 /homeassistant/components/slack/ @tkdrob @fletcherau
 /tests/components/slack/ @tkdrob @fletcherau
+/homeassistant/components/sleep_as_android/ @tr4nt0r
+/tests/components/sleep_as_android/ @tr4nt0r
 /homeassistant/components/sleepiq/ @mfugate1 @kbickar
 /tests/components/sleepiq/ @mfugate1 @kbickar
 /homeassistant/components/slide/ @ualex73

@@ -1599,6 +1599,8 @@ build.json @home-assistant/supervisor
 /tests/components/todo/ @home-assistant/core
 /homeassistant/components/todoist/ @boralyl
 /tests/components/todoist/ @boralyl
+/homeassistant/components/togrill/ @elupus
+/tests/components/togrill/ @elupus
 /homeassistant/components/tolo/ @MatthiasLohr
 /tests/components/tolo/ @MatthiasLohr
 /homeassistant/components/tomorrowio/ @raman325 @lymanepp

@@ -1613,8 +1615,6 @@ build.json @home-assistant/supervisor
 /tests/components/tplink_omada/ @MarkGodwin
 /homeassistant/components/traccar/ @ludeeus
 /tests/components/traccar/ @ludeeus
-/homeassistant/components/traccar_server/ @ludeeus
-/tests/components/traccar_server/ @ludeeus
 /homeassistant/components/trace/ @home-assistant/core
 /tests/components/trace/ @home-assistant/core
 /homeassistant/components/tractive/ @Danielhiversen @zhulik @bieniu
2 Dockerfile generated

@@ -31,7 +31,7 @@ RUN \
 && go2rtc --version

 # Install uv
-RUN pip3 install uv==0.7.1
+RUN pip3 install uv==0.8.9

 WORKDIR /usr/src
@@ -120,6 +120,9 @@ class AuthStore:
 new_user = models.User(**kwargs)

+while new_user.id in self._users:
+    new_user = models.User(**kwargs)
+
 self._users[new_user.id] = new_user

 if credentials is None:
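The added loop guards against the unlikely case where a freshly generated user ID collides with one already stored: a new `models.User` is constructed until its random ID is unused. A minimal standalone sketch of the same retry pattern, assuming a uuid4-based default ID as in Home Assistant's auth models (`User` here is a hypothetical stand-in, not the real class):

```python
import uuid
from dataclasses import dataclass, field


@dataclass
class User:
    # Hypothetical stand-in for models.User: the ID defaults to a random hex string.
    id: str = field(default_factory=lambda: uuid.uuid4().hex)


users: dict[str, User] = {}


def add_user() -> User:
    new_user = User()
    # Regenerate until the random ID does not collide with a stored user.
    while new_user.id in users:
        new_user = User()
    users[new_user.id] = new_user
    return new_user
```

Because uuid4 collisions are astronomically rare, the loop body almost never runs; it simply makes the invariant (unique keys in the store) explicit rather than probabilistic.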
@@ -10,7 +10,10 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from .coordinator import AirOSConfigEntry, AirOSDataUpdateCoordinator

-_PLATFORMS: list[Platform] = [Platform.SENSOR]
+_PLATFORMS: list[Platform] = [
+    Platform.BINARY_SENSOR,
+    Platform.SENSOR,
+]


 async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> bool:
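Adding `Platform.BINARY_SENSOR` to `_PLATFORMS` is what makes the new binary_sensor.py below load at all: setup forwards the config entry to every listed platform module. A minimal sketch of how such a list is typically consumed in a Home Assistant integration (the coordinator wiring from the real `async_setup_entry` is elided here):

```python
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

_PLATFORMS: list[Platform] = [
    Platform.BINARY_SENSOR,
    Platform.SENSOR,
]


async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    # Each listed platform's own async_setup_entry (e.g. in binary_sensor.py)
    # is invoked with this config entry.
    await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
    return True
```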
106 homeassistant/components/airos/binary_sensor.py Normal file

@@ -0,0 +1,106 @@
+"""AirOS Binary Sensor component for Home Assistant."""
+
+from __future__ import annotations
+
+from collections.abc import Callable
+from dataclasses import dataclass
+import logging
+
+from homeassistant.components.binary_sensor import (
+    BinarySensorDeviceClass,
+    BinarySensorEntity,
+    BinarySensorEntityDescription,
+)
+from homeassistant.const import EntityCategory
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+
+from .coordinator import AirOSConfigEntry, AirOSData, AirOSDataUpdateCoordinator
+from .entity import AirOSEntity
+
+_LOGGER = logging.getLogger(__name__)
+
+PARALLEL_UPDATES = 0
+
+
+@dataclass(frozen=True, kw_only=True)
+class AirOSBinarySensorEntityDescription(BinarySensorEntityDescription):
+    """Describe an AirOS binary sensor."""
+
+    value_fn: Callable[[AirOSData], bool]
+
+
+BINARY_SENSORS: tuple[AirOSBinarySensorEntityDescription, ...] = (
+    AirOSBinarySensorEntityDescription(
+        key="portfw",
+        translation_key="port_forwarding",
+        entity_category=EntityCategory.DIAGNOSTIC,
+        value_fn=lambda data: data.portfw,
+    ),
+    AirOSBinarySensorEntityDescription(
+        key="dhcp_client",
+        translation_key="dhcp_client",
+        device_class=BinarySensorDeviceClass.RUNNING,
+        entity_category=EntityCategory.DIAGNOSTIC,
+        value_fn=lambda data: data.services.dhcpc,
+    ),
+    AirOSBinarySensorEntityDescription(
+        key="dhcp_server",
+        translation_key="dhcp_server",
+        device_class=BinarySensorDeviceClass.RUNNING,
+        entity_category=EntityCategory.DIAGNOSTIC,
+        value_fn=lambda data: data.services.dhcpd,
+        entity_registry_enabled_default=False,
+    ),
+    AirOSBinarySensorEntityDescription(
+        key="dhcp6_server",
+        translation_key="dhcp6_server",
+        device_class=BinarySensorDeviceClass.RUNNING,
+        entity_category=EntityCategory.DIAGNOSTIC,
+        value_fn=lambda data: data.services.dhcp6d_stateful,
+        entity_registry_enabled_default=False,
+    ),
+    AirOSBinarySensorEntityDescription(
+        key="pppoe",
+        translation_key="pppoe",
+        device_class=BinarySensorDeviceClass.CONNECTIVITY,
+        entity_category=EntityCategory.DIAGNOSTIC,
+        value_fn=lambda data: data.services.pppoe,
+        entity_registry_enabled_default=False,
+    ),
+)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: AirOSConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up the AirOS binary sensors from a config entry."""
+    coordinator = config_entry.runtime_data
+
+    async_add_entities(
+        AirOSBinarySensor(coordinator, description) for description in BINARY_SENSORS
+    )
+
+
+class AirOSBinarySensor(AirOSEntity, BinarySensorEntity):
+    """Representation of a binary sensor."""
+
+    entity_description: AirOSBinarySensorEntityDescription
+
+    def __init__(
+        self,
+        coordinator: AirOSDataUpdateCoordinator,
+        description: AirOSBinarySensorEntityDescription,
+    ) -> None:
+        """Initialize the binary sensor."""
+        super().__init__(coordinator)
+
+        self.entity_description = description
+        self._attr_unique_id = f"{coordinator.data.host.device_id}_{description.key}"
+
+    @property
+    def is_on(self) -> bool:
+        """Return the state of the binary sensor."""
+        return self.entity_description.value_fn(self.coordinator.data)
@@ -6,11 +6,11 @@ import logging
 from typing import Any

 from airos.exceptions import (
-    ConnectionAuthenticationError,
-    ConnectionSetupError,
-    DataMissingError,
-    DeviceConnectionError,
-    KeyDataMissingError,
+    AirOSConnectionAuthenticationError,
+    AirOSConnectionSetupError,
+    AirOSDataMissingError,
+    AirOSDeviceConnectionError,
+    AirOSKeyDataMissingError,
 )
 import voluptuous as vol

@@ -59,13 +59,13 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
                 airos_data = await airos_device.status()

             except (
-                ConnectionSetupError,
-                DeviceConnectionError,
+                AirOSConnectionSetupError,
+                AirOSDeviceConnectionError,
             ):
                 errors["base"] = "cannot_connect"
-            except (ConnectionAuthenticationError, DataMissingError):
+            except (AirOSConnectionAuthenticationError, AirOSDataMissingError):
                 errors["base"] = "invalid_auth"
-            except KeyDataMissingError:
+            except AirOSKeyDataMissingError:
                 errors["base"] = "key_data_missing"
             except Exception:
                 _LOGGER.exception("Unexpected exception")
@@ -6,10 +6,10 @@ import logging

 from airos.airos8 import AirOS, AirOSData
 from airos.exceptions import (
-    ConnectionAuthenticationError,
-    ConnectionSetupError,
-    DataMissingError,
-    DeviceConnectionError,
+    AirOSConnectionAuthenticationError,
+    AirOSConnectionSetupError,
+    AirOSDataMissingError,
+    AirOSDeviceConnectionError,
 )

 from homeassistant.config_entries import ConfigEntry
@@ -47,18 +47,22 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSData]):
         try:
             await self.airos_device.login()
             return await self.airos_device.status()
-        except (ConnectionAuthenticationError,) as err:
+        except (AirOSConnectionAuthenticationError,) as err:
             _LOGGER.exception("Error authenticating with airOS device")
             raise ConfigEntryError(
                 translation_domain=DOMAIN, translation_key="invalid_auth"
             ) from err
-        except (ConnectionSetupError, DeviceConnectionError, TimeoutError) as err:
+        except (
+            AirOSConnectionSetupError,
+            AirOSDeviceConnectionError,
+            TimeoutError,
+        ) as err:
             _LOGGER.error("Error connecting to airOS device: %s", err)
             raise UpdateFailed(
                 translation_domain=DOMAIN,
                 translation_key="cannot_connect",
             ) from err
-        except (DataMissingError,) as err:
+        except (AirOSDataMissingError,) as err:
             _LOGGER.error("Expected data not returned by airOS device: %s", err)
             raise UpdateFailed(
                 translation_domain=DOMAIN,
homeassistant/components/airos/diagnostics.py (new file, 33 lines)
@@ -0,0 +1,33 @@
+"""Diagnostics support for airOS."""
+
+from __future__ import annotations
+
+from typing import Any
+
+from homeassistant.components.diagnostics import async_redact_data
+from homeassistant.const import CONF_HOST, CONF_PASSWORD
+from homeassistant.core import HomeAssistant
+
+from .coordinator import AirOSConfigEntry
+
+IP_REDACT = ["addr", "ipaddr", "ip6addr", "lastip"]  # IP related
+HW_REDACT = ["apmac", "hwaddr", "mac"]  # MAC address
+TO_REDACT_HA = [CONF_HOST, CONF_PASSWORD]
+TO_REDACT_AIROS = [
+    "hostname",  # Prevent leaking device naming
+    "essid",  # Network SSID
+    "lat",  # GPS latitude to prevent exposing location data.
+    "lon",  # GPS longitude to prevent exposing location data.
+    *HW_REDACT,
+    *IP_REDACT,
+]
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant, entry: AirOSConfigEntry
+) -> dict[str, Any]:
+    """Return diagnostics for a config entry."""
+    return {
+        "entry_data": async_redact_data(entry.data, TO_REDACT_HA),
+        "data": async_redact_data(entry.runtime_data.data.to_dict(), TO_REDACT_AIROS),
+    }
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/airos",
   "iot_class": "local_polling",
   "quality_scale": "bronze",
-  "requirements": ["airos==0.2.1"]
+  "requirements": ["airos==0.3.0"]
 }
@@ -41,7 +41,7 @@ rules:

   # Gold
   devices: done
-  diagnostics: todo
+  diagnostics: done
   discovery-update-info: todo
   discovery: todo
   docs-data-update: done
@@ -54,9 +54,7 @@ rules:
   dynamic-devices: todo
   entity-category: done
   entity-device-class: done
-  entity-disabled-by-default:
-    status: todo
-    comment: prepared binary_sensors will provide this
+  entity-disabled-by-default: done
   entity-translations: done
   exception-translations: done
   icon-translations:
@@ -6,7 +6,7 @@ from collections.abc import Callable
 from dataclasses import dataclass
 import logging

-from airos.data import NetRole, WirelessMode
+from airos.data import DerivedWirelessMode, DerivedWirelessRole, NetRole

 from homeassistant.components.sensor import (
     SensorDeviceClass,
@@ -19,6 +19,8 @@ from homeassistant.const import (
     SIGNAL_STRENGTH_DECIBELS,
     UnitOfDataRate,
     UnitOfFrequency,
+    UnitOfLength,
+    UnitOfTime,
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -29,8 +31,11 @@ from .entity import AirOSEntity

 _LOGGER = logging.getLogger(__name__)

-WIRELESS_MODE_OPTIONS = [mode.value.replace("-", "_").lower() for mode in WirelessMode]
 NETROLE_OPTIONS = [mode.value for mode in NetRole]
+WIRELESS_MODE_OPTIONS = [mode.value for mode in DerivedWirelessMode]
+WIRELESS_ROLE_OPTIONS = [mode.value for mode in DerivedWirelessRole]
+
+PARALLEL_UPDATES = 0


 @dataclass(frozen=True, kw_only=True)
@@ -46,6 +51,7 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
         translation_key="host_cpuload",
         native_unit_of_measurement=PERCENTAGE,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=1,
         value_fn=lambda data: data.host.cpuload,
         entity_registry_enabled_default=False,
     ),
@@ -69,13 +75,6 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
         translation_key="wireless_essid",
         value_fn=lambda data: data.wireless.essid,
     ),
-    AirOSSensorEntityDescription(
-        key="wireless_mode",
-        translation_key="wireless_mode",
-        device_class=SensorDeviceClass.ENUM,
-        value_fn=lambda data: data.wireless.mode.value.replace("-", "_").lower(),
-        options=WIRELESS_MODE_OPTIONS,
-    ),
     AirOSSensorEntityDescription(
         key="wireless_antenna_gain",
         translation_key="wireless_antenna_gain",
@@ -90,6 +89,8 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
         native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
         device_class=SensorDeviceClass.DATA_RATE,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
+        suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
         value_fn=lambda data: data.wireless.throughput.tx,
     ),
     AirOSSensorEntityDescription(
@@ -98,6 +99,8 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
         native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
         device_class=SensorDeviceClass.DATA_RATE,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
+        suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
         value_fn=lambda data: data.wireless.throughput.rx,
     ),
     AirOSSensorEntityDescription(
@@ -106,6 +109,8 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
         native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
         device_class=SensorDeviceClass.DATA_RATE,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
+        suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
         value_fn=lambda data: data.wireless.polling.dl_capacity,
     ),
     AirOSSensorEntityDescription(
@@ -114,8 +119,45 @@ SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
         native_unit_of_measurement=UnitOfDataRate.KILOBITS_PER_SECOND,
         device_class=SensorDeviceClass.DATA_RATE,
         state_class=SensorStateClass.MEASUREMENT,
+        suggested_display_precision=0,
+        suggested_unit_of_measurement=UnitOfDataRate.MEGABITS_PER_SECOND,
         value_fn=lambda data: data.wireless.polling.ul_capacity,
     ),
+    AirOSSensorEntityDescription(
+        key="host_uptime",
+        translation_key="host_uptime",
+        native_unit_of_measurement=UnitOfTime.SECONDS,
+        device_class=SensorDeviceClass.DURATION,
+        suggested_display_precision=0,
+        suggested_unit_of_measurement=UnitOfTime.DAYS,
+        value_fn=lambda data: data.host.uptime,
+        entity_registry_enabled_default=False,
+    ),
+    AirOSSensorEntityDescription(
+        key="wireless_distance",
+        translation_key="wireless_distance",
+        native_unit_of_measurement=UnitOfLength.METERS,
+        device_class=SensorDeviceClass.DISTANCE,
+        suggested_display_precision=1,
+        suggested_unit_of_measurement=UnitOfLength.KILOMETERS,
+        value_fn=lambda data: data.wireless.distance,
+    ),
+    AirOSSensorEntityDescription(
+        key="wireless_mode",
+        translation_key="wireless_mode",
+        device_class=SensorDeviceClass.ENUM,
+        value_fn=lambda data: data.derived.mode.value,
+        options=WIRELESS_MODE_OPTIONS,
+        entity_registry_enabled_default=False,
+    ),
+    AirOSSensorEntityDescription(
+        key="wireless_role",
+        translation_key="wireless_role",
+        device_class=SensorDeviceClass.ENUM,
+        value_fn=lambda data: data.derived.role.value,
+        options=WIRELESS_ROLE_OPTIONS,
+        entity_registry_enabled_default=False,
+    ),
 )

@@ -26,6 +26,23 @@
     }
   },
   "entity": {
+    "binary_sensor": {
+      "port_forwarding": {
+        "name": "Port forwarding"
+      },
+      "dhcp_client": {
+        "name": "DHCP client"
+      },
+      "dhcp_server": {
+        "name": "DHCP server"
+      },
+      "dhcp6_server": {
+        "name": "DHCPv6 server"
+      },
+      "pppoe": {
+        "name": "PPPoE link"
+      }
+    },
     "sensor": {
       "host_cpuload": {
         "name": "CPU load"
@@ -43,13 +60,6 @@
       "wireless_essid": {
         "name": "Wireless SSID"
       },
-      "wireless_mode": {
-        "name": "Wireless mode",
-        "state": {
-          "ap_ptp": "Access point",
-          "sta_ptp": "Station"
-        }
-      },
       "wireless_antenna_gain": {
         "name": "Antenna gain"
       },
@@ -67,6 +77,26 @@
       },
       "wireless_remote_hostname": {
         "name": "Remote hostname"
+      },
+      "host_uptime": {
+        "name": "Uptime"
+      },
+      "wireless_distance": {
+        "name": "Wireless distance"
+      },
+      "wireless_role": {
+        "name": "Wireless role",
+        "state": {
+          "access_point": "Access point",
+          "station": "Station"
+        }
+      },
+      "wireless_mode": {
+        "name": "Wireless mode",
+        "state": {
+          "point_to_point": "Point-to-point",
+          "point_to_multipoint": "Point-to-multipoint"
+        }
       }
     }
   },
@@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant
 from .const import CONF_CLIP_NEGATIVE, CONF_RETURN_AVERAGE
 from .coordinator import AirQCoordinator

-PLATFORMS: list[Platform] = [Platform.SENSOR]
+PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR]

 AirQConfigEntry = ConfigEntry[AirQCoordinator]

@@ -75,6 +75,7 @@ class AirQCoordinator(DataUpdateCoordinator):
             return_average=self.return_average,
             clip_negative_values=self.clip_negative,
         )
+        data["brightness"] = await self.airq.get_current_brightness()
         if warming_up_sensors := identify_warming_up_sensors(data):
             _LOGGER.debug(
                 "Following sensors are still warming up: %s", warming_up_sensors
homeassistant/components/airq/number.py (new file, 85 lines)
@@ -0,0 +1,85 @@
+"""Definition of air-Q number platform used to control the LED strips."""
+
+from __future__ import annotations
+
+from collections.abc import Awaitable, Callable
+from dataclasses import dataclass
+import logging
+
+from aioairq.core import AirQ
+
+from homeassistant.components.number import NumberEntity, NumberEntityDescription
+from homeassistant.const import PERCENTAGE
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from . import AirQConfigEntry, AirQCoordinator
+
+_LOGGER = logging.getLogger(__name__)
+
+
+@dataclass(frozen=True, kw_only=True)
+class AirQBrightnessDescription(NumberEntityDescription):
+    """Describes AirQ number entity responsible for brightness control."""
+
+    value: Callable[[dict], float]
+    set_value: Callable[[AirQ, float], Awaitable[None]]
+
+
+AIRQ_LED_BRIGHTNESS = AirQBrightnessDescription(
+    key="airq_led_brightness",
+    translation_key="airq_led_brightness",
+    native_min_value=0.0,
+    native_max_value=100.0,
+    native_step=1.0,
+    native_unit_of_measurement=PERCENTAGE,
+    value=lambda data: data["brightness"],
+    set_value=lambda device, value: device.set_current_brightness(value),
+)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    entry: AirQConfigEntry,
+    async_add_entities: AddConfigEntryEntitiesCallback,
+) -> None:
+    """Set up number entities: a single entity for the LEDs."""
+
+    coordinator = entry.runtime_data
+    entities = [AirQLEDBrightness(coordinator, AIRQ_LED_BRIGHTNESS)]
+
+    async_add_entities(entities)
+
+
+class AirQLEDBrightness(CoordinatorEntity[AirQCoordinator], NumberEntity):
+    """Representation of the LEDs from a single AirQ."""
+
+    _attr_has_entity_name = True
+
+    def __init__(
+        self,
+        coordinator: AirQCoordinator,
+        description: AirQBrightnessDescription,
+    ) -> None:
+        """Initialize a single sensor."""
+        super().__init__(coordinator)
+        self.entity_description: AirQBrightnessDescription = description
+
+        self._attr_device_info = coordinator.device_info
+        self._attr_unique_id = f"{coordinator.device_id}_{description.key}"
+
+    @property
+    def native_value(self) -> float:
+        """Return the brightness of the LEDs in %."""
+        return self.entity_description.value(self.coordinator.data)
+
+    async def async_set_native_value(self, value: float) -> None:
+        """Set the brightness of the LEDs to the value in %."""
+        _LOGGER.debug(
+            "Changing LED brightness from %.0f%% to %.0f%%",
+            self.coordinator.data["brightness"],
+            value,
+        )
+        await self.entity_description.set_value(self.coordinator.airq, value)
+        await self.coordinator.async_request_refresh()
@@ -35,6 +35,11 @@
     }
   },
   "entity": {
+    "number": {
+      "airq_led_brightness": {
+        "name": "LED brightness"
+      }
+    },
     "sensor": {
       "acetaldehyde": {
         "name": "Acetaldehyde"
@@ -7,21 +7,18 @@ import logging

 from airthings import Airthings

-from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_ID, Platform
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.aiohttp_client import async_get_clientsession

 from .const import CONF_SECRET
-from .coordinator import AirthingsDataUpdateCoordinator
+from .coordinator import AirthingsConfigEntry, AirthingsDataUpdateCoordinator

 _LOGGER = logging.getLogger(__name__)

 PLATFORMS: list[Platform] = [Platform.SENSOR]
 SCAN_INTERVAL = timedelta(minutes=6)

-type AirthingsConfigEntry = ConfigEntry[AirthingsDataUpdateCoordinator]
-

 async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) -> bool:
     """Set up Airthings from a config entry."""
@@ -31,7 +28,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirthingsConfigEntry) ->
         async_get_clientsession(hass),
     )

-    coordinator = AirthingsDataUpdateCoordinator(hass, airthings)
+    coordinator = AirthingsDataUpdateCoordinator(hass, airthings, entry)

     await coordinator.async_config_entry_first_refresh()

@@ -5,6 +5,7 @@ import logging

 from airthings import Airthings, AirthingsDevice, AirthingsError

+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -13,15 +14,23 @@ from .const import DOMAIN
 _LOGGER = logging.getLogger(__name__)
 SCAN_INTERVAL = timedelta(minutes=6)

+type AirthingsConfigEntry = ConfigEntry[AirthingsDataUpdateCoordinator]
+

 class AirthingsDataUpdateCoordinator(DataUpdateCoordinator[dict[str, AirthingsDevice]]):
     """Coordinator for Airthings data updates."""

-    def __init__(self, hass: HomeAssistant, airthings: Airthings) -> None:
+    def __init__(
+        self,
+        hass: HomeAssistant,
+        airthings: Airthings,
+        config_entry: AirthingsConfigEntry,
+    ) -> None:
         """Initialize the coordinator."""
         super().__init__(
             hass,
             _LOGGER,
+            config_entry=config_entry,
             name=DOMAIN,
             update_method=self._update_method,
             update_interval=SCAN_INTERVAL,
@@ -9,7 +9,6 @@ DOMAIN: Final = "amberelectric"
|
|||||||
CONF_SITE_NAME = "site_name"
|
CONF_SITE_NAME = "site_name"
|
||||||
CONF_SITE_ID = "site_id"
|
CONF_SITE_ID = "site_id"
|
||||||
|
|
||||||
ATTR_CONFIG_ENTRY_ID = "config_entry_id"
|
|
||||||
ATTR_CHANNEL_TYPE = "channel_type"
|
ATTR_CHANNEL_TYPE = "channel_type"
|
||||||
|
|
||||||
ATTRIBUTION = "Data provided by Amber Electric"
|
ATTRIBUTION = "Data provided by Amber Electric"
|
||||||
|
|||||||
@@ -4,6 +4,7 @@ from amberelectric.models.channel import ChannelType
 import voluptuous as vol

 from homeassistant.config_entries import ConfigEntryState
+from homeassistant.const import ATTR_CONFIG_ENTRY_ID
 from homeassistant.core import (
     HomeAssistant,
     ServiceCall,
@@ -16,7 +17,6 @@ from homeassistant.util.json import JsonValueType

 from .const import (
     ATTR_CHANNEL_TYPE,
-    ATTR_CONFIG_ENTRY_ID,
     CONTROLLED_LOAD_CHANNEL,
     DOMAIN,
     FEED_IN_CHANNEL,
@@ -5,7 +5,7 @@ from __future__ import annotations
 from aioambient.util import get_public_device_id

 from homeassistant.core import callback
-from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
 from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity import Entity, EntityDescription

@@ -37,6 +37,7 @@ class AmbientWeatherEntity(Entity):
             identifiers={(DOMAIN, mac_address)},
             manufacturer="Ambient Weather",
             name=station_name.capitalize(),
+            connections={(CONNECTION_NETWORK_MAC, mac_address)},
         )

         self._attr_unique_id = f"{mac_address}_{description.key}"
@@ -390,7 +390,6 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:

 async def async_devices_payload(hass: HomeAssistant) -> dict:
     """Return the devices payload."""
-    integrations_without_model_id: set[str] = set()
     devices: list[dict[str, Any]] = []
     dev_reg = dr.async_get(hass)
     # Devices that need via device info set
@@ -400,10 +399,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
     seen_integrations = set()

     for device in dev_reg.devices.values():
-        # Ignore services
-        if device.entry_type:
-            continue
-
         if not device.primary_config_entry:
             continue

@@ -414,13 +409,6 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:

         seen_integrations.add(config_entry.domain)

-        if not device.model_id:
-            integrations_without_model_id.add(config_entry.domain)
-            continue
-
-        if not device.manufacturer:
-            continue
-
         new_indexes[device.id] = len(devices)
         devices.append(
             {
@@ -430,11 +418,12 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
                 "model": device.model,
                 "sw_version": device.sw_version,
                 "hw_version": device.hw_version,
-                "has_suggested_area": device.suggested_area is not None,
                 "has_configuration_url": device.configuration_url is not None,
                 "via_device": None,
+                "entry_type": device.entry_type.value if device.entry_type else None,
             }
         )

         if device.via_device_id:
             via_devices[device.id] = device.via_device_id

@@ -454,15 +443,11 @@ async def async_devices_payload(hass: HomeAssistant) -> dict:
     for device_info in devices:
         if integration := integrations.get(device_info["integration"]):
             device_info["is_custom_integration"] = not integration.is_built_in
+            # Include version for custom integrations
+            if not integration.is_built_in and integration.version:
+                device_info["custom_integration_version"] = str(integration.version)

     return {
         "version": "home-assistant:1",
-        "no_model_id": sorted(
-            [
-                domain
-                for domain in integrations_without_model_id
-                if domain in integrations and integrations[domain].is_built_in
-            ]
-        ),
         "devices": devices,
     }
@@ -30,10 +30,9 @@ class AndroidIPCamDataUpdateCoordinator(DataUpdateCoordinator[None]):
         cam: PyDroidIPCam,
     ) -> None:
         """Initialize the Android IP Webcam."""
-        self.hass = hass
         self.cam = cam
         super().__init__(
-            self.hass,
+            hass,
             _LOGGER,
             config_entry=config_entry,
             name=f"{DOMAIN} {config_entry.data[CONF_HOST]}",
@@ -81,11 +81,15 @@ async def async_update_options(
 async def async_migrate_integration(hass: HomeAssistant) -> None:
     """Migrate integration entry structure."""

-    entries = hass.config_entries.async_entries(DOMAIN)
+    # Make sure we get enabled config entries first
+    entries = sorted(
+        hass.config_entries.async_entries(DOMAIN),
+        key=lambda e: e.disabled_by is not None,
+    )
     if not any(entry.version == 1 for entry in entries):
         return

-    api_keys_entries: dict[str, ConfigEntry] = {}
+    api_keys_entries: dict[str, tuple[ConfigEntry, bool]] = {}
     entity_registry = er.async_get(hass)
     device_registry = dr.async_get(hass)

@@ -99,30 +103,61 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
         )
         if entry.data[CONF_API_KEY] not in api_keys_entries:
             use_existing = True
-            api_keys_entries[entry.data[CONF_API_KEY]] = entry
+            all_disabled = all(
+                e.disabled_by is not None
+                for e in entries
+                if e.data[CONF_API_KEY] == entry.data[CONF_API_KEY]
+            )
+            api_keys_entries[entry.data[CONF_API_KEY]] = (entry, all_disabled)

-        parent_entry = api_keys_entries[entry.data[CONF_API_KEY]]
+        parent_entry, all_disabled = api_keys_entries[entry.data[CONF_API_KEY]]

         hass.config_entries.async_add_subentry(parent_entry, subentry)
-        conversation_entity = entity_registry.async_get_entity_id(
+        conversation_entity_id = entity_registry.async_get_entity_id(
             "conversation",
             DOMAIN,
             entry.entry_id,
         )
-        if conversation_entity is not None:
-            entity_registry.async_update_entity(
-                conversation_entity,
-                config_entry_id=parent_entry.entry_id,
-                config_subentry_id=subentry.subentry_id,
-                new_unique_id=subentry.subentry_id,
-            )

         device = device_registry.async_get_device(
             identifiers={(DOMAIN, entry.entry_id)}
         )

+        if conversation_entity_id is not None:
+            conversation_entity_entry = entity_registry.entities[conversation_entity_id]
+            entity_disabled_by = conversation_entity_entry.disabled_by
+            if (
+                entity_disabled_by is er.RegistryEntryDisabler.CONFIG_ENTRY
+                and not all_disabled
+            ):
+                # Device and entity registries don't update the disabled_by flag
+                # when moving a device or entity from one config entry to another,
+                # so we need to do it manually.
+                entity_disabled_by = (
+                    er.RegistryEntryDisabler.DEVICE
+                    if device
+                    else er.RegistryEntryDisabler.USER
+                )
+            entity_registry.async_update_entity(
+                conversation_entity_id,
+                config_entry_id=parent_entry.entry_id,
+                config_subentry_id=subentry.subentry_id,
+                disabled_by=entity_disabled_by,
+                new_unique_id=subentry.subentry_id,
+            )
+
         if device is not None:
+            # Device and entity registries don't update the disabled_by flag when
+            # moving a device or entity from one config entry to another, so we
+            # need to do it manually.
+            device_disabled_by = device.disabled_by
+            if (
+                device.disabled_by is dr.DeviceEntryDisabler.CONFIG_ENTRY
+                and not all_disabled
+            ):
+                device_disabled_by = dr.DeviceEntryDisabler.USER
             device_registry.async_update_device(
                 device.id,
+                disabled_by=device_disabled_by,
                 new_identifiers={(DOMAIN, subentry.subentry_id)},
                 add_config_subentry_id=subentry.subentry_id,
                 add_config_entry_id=parent_entry.entry_id,
@@ -147,7 +182,7 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
             title=DEFAULT_CONVERSATION_NAME,
             options={},
             version=2,
-            minor_version=2,
+            minor_version=3,
         )


@@ -173,6 +208,38 @@ async def async_migrate_entry(hass: HomeAssistant, entry: AnthropicConfigEntry)

         hass.config_entries.async_update_entry(entry, minor_version=2)

+    if entry.version == 2 and entry.minor_version == 2:
+        # Fix migration where the disabled_by flag was not set correctly.
+        # We can currently only correct this for enabled config entries,
+        # because migration does not run for disabled config entries. This
+        # is asserted in tests, and if that behavior is changed, we should
+        # correct also disabled config entries.
+        device_registry = dr.async_get(hass)
+        entity_registry = er.async_get(hass)
+        devices = dr.async_entries_for_config_entry(device_registry, entry.entry_id)
+        entity_entries = er.async_entries_for_config_entry(
+            entity_registry, entry.entry_id
+        )
+        if entry.disabled_by is None:
+            # If the config entry is not disabled, we need to set the disabled_by
+            # flag on devices to USER, and on entities to DEVICE, if they are set
+            # to CONFIG_ENTRY.
+            for device in devices:
+                if device.disabled_by is not dr.DeviceEntryDisabler.CONFIG_ENTRY:
+                    continue
+                device_registry.async_update_device(
+                    device.id,
+                    disabled_by=dr.DeviceEntryDisabler.USER,
+                )
+            for entity in entity_entries:
+                if entity.disabled_by is not er.RegistryEntryDisabler.CONFIG_ENTRY:
+                    continue
+                entity_registry.async_update_entity(
+                    entity.entity_id,
+                    disabled_by=er.RegistryEntryDisabler.DEVICE,
+                )
+        hass.config_entries.async_update_entry(entry, minor_version=3)
+
     LOGGER.debug(
         "Migration to version %s:%s successful", entry.version, entry.minor_version
     )
@@ -75,7 +75,7 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for Anthropic."""

     VERSION = 2
-    MINOR_VERSION = 2
+    MINOR_VERSION = 3

     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
@@ -20,10 +20,8 @@ RECOMMENDED_THINKING_BUDGET = 0
 MIN_THINKING_BUDGET = 1024

 THINKING_MODELS = [
-    "claude-3-7-sonnet-20250219",
-    "claude-3-7-sonnet-latest",
-    "claude-opus-4-20250514",
-    "claude-opus-4-0",
-    "claude-sonnet-4-20250514",
+    "claude-3-7-sonnet",
     "claude-sonnet-4-0",
+    "claude-opus-4-0",
+    "claude-opus-4-1",
 ]
@@ -2,11 +2,10 @@
|
|||||||
|
|
||||||
from collections.abc import AsyncGenerator, Callable, Iterable
|
from collections.abc import AsyncGenerator, Callable, Iterable
|
||||||
import json
|
import json
|
||||||
from typing import Any, cast
|
from typing import Any
|
||||||
|
|
||||||
import anthropic
|
import anthropic
|
||||||
from anthropic import AsyncStream
|
from anthropic import AsyncStream
|
||||||
from anthropic._types import NOT_GIVEN
|
|
||||||
from anthropic.types import (
|
from anthropic.types import (
|
||||||
InputJSONDelta,
|
InputJSONDelta,
|
||||||
MessageDeltaUsage,
|
MessageDeltaUsage,
|
||||||
@@ -17,7 +16,6 @@ from anthropic.types import (
|
|||||||
RawContentBlockStopEvent,
|
RawContentBlockStopEvent,
|
||||||
RawMessageDeltaEvent,
|
RawMessageDeltaEvent,
|
||||||
RawMessageStartEvent,
|
RawMessageStartEvent,
|
||||||
RawMessageStopEvent,
|
|
||||||
RedactedThinkingBlock,
|
RedactedThinkingBlock,
|
||||||
RedactedThinkingBlockParam,
|
RedactedThinkingBlockParam,
|
||||||
SignatureDelta,
|
SignatureDelta,
|
||||||
@@ -35,6 +33,7 @@ from anthropic.types import (
|
|||||||
ToolUseBlockParam,
|
ToolUseBlockParam,
|
||||||
Usage,
|
Usage,
|
||||||
)
|
)
|
||||||
|
from anthropic.types.message_create_params import MessageCreateParamsStreaming
|
||||||
from voluptuous_openapi import convert
|
from voluptuous_openapi import convert
|
||||||
|
|
||||||
from homeassistant.components import conversation
|
from homeassistant.components import conversation
|
||||||
@@ -129,6 +128,28 @@ def _convert_content(
|
|||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if isinstance(content.native, ThinkingBlock):
|
||||||
|
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||||
|
ThinkingBlockParam(
|
||||||
|
type="thinking",
|
||||||
|
thinking=content.thinking_content or "",
|
||||||
|
signature=content.native.signature,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
elif isinstance(content.native, RedactedThinkingBlock):
|
||||||
|
redacted_thinking_block = RedactedThinkingBlockParam(
|
||||||
|
type="redacted_thinking",
|
||||||
|
data=content.native.data,
|
||||||
|
)
|
||||||
|
if isinstance(messages[-1]["content"], str):
|
||||||
|
messages[-1]["content"] = [
|
||||||
|
TextBlockParam(type="text", text=messages[-1]["content"]),
|
||||||
|
redacted_thinking_block,
|
||||||
|
]
|
||||||
|
else:
|
||||||
|
messages[-1]["content"].append( # type: ignore[attr-defined]
|
||||||
|
redacted_thinking_block
|
||||||
|
)
|
||||||
if content.content:
|
if content.content:
|
||||||
messages[-1]["content"].append( # type: ignore[union-attr]
|
messages[-1]["content"].append( # type: ignore[union-attr]
|
||||||
TextBlockParam(type="text", text=content.content)
|
TextBlockParam(type="text", text=content.content)
|
||||||
@@ -152,10 +173,9 @@ def _convert_content(
|
|||||||
return messages
|
return messages
|
||||||
|
|
||||||
|
|
||||||
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
|
async def _transform_stream(
|
||||||
chat_log: conversation.ChatLog,
|
chat_log: conversation.ChatLog,
|
||||||
result: AsyncStream[MessageStreamEvent],
|
stream: AsyncStream[MessageStreamEvent],
|
||||||
messages: list[MessageParam],
|
|
||||||
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
) -> AsyncGenerator[conversation.AssistantContentDeltaDict]:
|
||||||
"""Transform the response stream into HA format.
|
"""Transform the response stream into HA format.
|
||||||
|
|
||||||
@@ -186,31 +206,25 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
|||||||
|
|
||||||
Each message could contain multiple blocks of the same type.
|
Each message could contain multiple blocks of the same type.
|
||||||
"""
|
"""
|
||||||
if result is None:
|
if stream is None:
|
||||||
raise TypeError("Expected a stream of messages")
|
raise TypeError("Expected a stream of messages")
|
||||||
|
|
||||||
current_message: MessageParam | None = None
|
current_tool_block: ToolUseBlockParam | None = None
|
||||||
current_block: (
|
|
||||||
TextBlockParam
|
|
||||||
| ToolUseBlockParam
|
|
||||||
| ThinkingBlockParam
|
|
||||||
| RedactedThinkingBlockParam
|
|
||||||
| None
|
|
||||||
) = None
|
|
||||||
current_tool_args: str
|
current_tool_args: str
|
||||||
input_usage: Usage | None = None
|
input_usage: Usage | None = None
|
||||||
|
has_content = False
|
||||||
|
has_native = False
|
||||||
|
|
||||||
async for response in result:
|
async for response in stream:
|
||||||
LOGGER.debug("Received response: %s", response)
|
LOGGER.debug("Received response: %s", response)
|
||||||
|
|
||||||
if isinstance(response, RawMessageStartEvent):
|
if isinstance(response, RawMessageStartEvent):
|
||||||
if response.message.role != "assistant":
|
if response.message.role != "assistant":
|
||||||
raise ValueError("Unexpected message role")
|
raise ValueError("Unexpected message role")
|
||||||
current_message = MessageParam(role=response.message.role, content=[])
|
|
||||||
input_usage = response.message.usage
|
input_usage = response.message.usage
|
||||||
elif isinstance(response, RawContentBlockStartEvent):
|
elif isinstance(response, RawContentBlockStartEvent):
|
||||||
if isinstance(response.content_block, ToolUseBlock):
|
if isinstance(response.content_block, ToolUseBlock):
|
||||||
current_block = ToolUseBlockParam(
|
current_tool_block = ToolUseBlockParam(
|
||||||
type="tool_use",
|
type="tool_use",
|
||||||
id=response.content_block.id,
|
id=response.content_block.id,
|
||||||
name=response.content_block.name,
|
name=response.content_block.name,
|
||||||
@@ -218,75 +232,64 @@ async def _transform_stream( # noqa: C901 - This is complex, but better to have
|
|||||||
)
|
)
|
||||||
current_tool_args = ""
|
current_tool_args = ""
|
||||||
elif isinstance(response.content_block, TextBlock):
|
elif isinstance(response.content_block, TextBlock):
|
||||||
current_block = TextBlockParam(
|
if has_content:
|
||||||
type="text", text=response.content_block.text
|
yield {"role": "assistant"}
|
||||||
)
|
has_native = False
|
||||||
yield {"role": "assistant"}
|
has_content = True
|
||||||
if response.content_block.text:
|
if response.content_block.text:
|
||||||
yield {"content": response.content_block.text}
|
yield {"content": response.content_block.text}
|
||||||
elif isinstance(response.content_block, ThinkingBlock):
|
elif isinstance(response.content_block, ThinkingBlock):
|
||||||
current_block = ThinkingBlockParam(
|
if has_native:
|
||||||
type="thinking",
|
yield {"role": "assistant"}
|
||||||
thinking=response.content_block.thinking,
|
has_native = False
|
||||||
signature=response.content_block.signature,
|
has_content = False
|
||||||
)
|
|
||||||
elif isinstance(response.content_block, RedactedThinkingBlock):
|
elif isinstance(response.content_block, RedactedThinkingBlock):
|
||||||
current_block = RedactedThinkingBlockParam(
|
|
||||||
type="redacted_thinking", data=response.content_block.data
|
|
||||||
)
|
|
||||||
LOGGER.debug(
|
LOGGER.debug(
|
||||||
"Some of Claude’s internal reasoning has been automatically "
|
"Some of Claude’s internal reasoning has been automatically "
|
||||||
"encrypted for safety reasons. This doesn’t affect the quality of "
|
"encrypted for safety reasons. This doesn’t affect the quality of "
|
||||||
"responses"
|
"responses"
|
||||||
)
|
)
|
||||||
|
if has_native:
|
||||||
|
yield {"role": "assistant"}
|
||||||
|
has_native = False
|
||||||
|
has_content = False
|
||||||
|
yield {"native": response.content_block}
|
||||||
|
has_native = True
|
||||||
elif isinstance(response, RawContentBlockDeltaEvent):
|
elif isinstance(response, RawContentBlockDeltaEvent):
|
||||||
if current_block is None:
|
|
||||||
raise ValueError("Unexpected delta without a block")
|
|
||||||
if isinstance(response.delta, InputJSONDelta):
|
if isinstance(response.delta, InputJSONDelta):
|
||||||
current_tool_args += response.delta.partial_json
|
current_tool_args += response.delta.partial_json
|
||||||
elif isinstance(response.delta, TextDelta):
|
elif isinstance(response.delta, TextDelta):
|
||||||
text_block = cast(TextBlockParam, current_block)
|
|
||||||
text_block["text"] += response.delta.text
|
|
||||||
yield {"content": response.delta.text}
|
yield {"content": response.delta.text}
|
||||||
elif isinstance(response.delta, ThinkingDelta):
|
elif isinstance(response.delta, ThinkingDelta):
|
||||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
yield {"thinking_content": response.delta.thinking}
|
||||||
thinking_block["thinking"] += response.delta.thinking
|
|
||||||
elif isinstance(response.delta, SignatureDelta):
|
elif isinstance(response.delta, SignatureDelta):
|
||||||
thinking_block = cast(ThinkingBlockParam, current_block)
|
yield {
|
||||||
thinking_block["signature"] += response.delta.signature
|
"native": ThinkingBlock(
|
||||||
|
type="thinking",
|
||||||
|
thinking="",
|
||||||
|
signature=response.delta.signature,
|
||||||
|
)
|
||||||
|
}
|
||||||
|
has_native = True
|
||||||
elif isinstance(response, RawContentBlockStopEvent):
|
elif isinstance(response, RawContentBlockStopEvent):
|
||||||
if current_block is None:
|
if current_tool_block is not None:
|
||||||
raise ValueError("Unexpected stop event without a current block")
|
|
||||||
if current_block["type"] == "tool_use":
|
|
||||||
# tool block
|
|
||||||
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
tool_args = json.loads(current_tool_args) if current_tool_args else {}
|
||||||
current_block["input"] = tool_args
|
current_tool_block["input"] = tool_args
|
||||||
yield {
|
yield {
|
||||||
"tool_calls": [
|
"tool_calls": [
|
||||||
llm.ToolInput(
|
llm.ToolInput(
|
||||||
id=current_block["id"],
|
id=current_tool_block["id"],
|
||||||
tool_name=current_block["name"],
|
tool_name=current_tool_block["name"],
|
||||||
tool_args=tool_args,
|
tool_args=tool_args,
|
||||||
)
|
)
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
elif current_block["type"] == "thinking":
|
current_tool_block = None
|
||||||
# thinking block
|
|
||||||
LOGGER.debug("Thinking: %s", current_block["thinking"])
|
|
||||||
|
|
||||||
if current_message is None:
|
|
||||||
raise ValueError("Unexpected stop event without a current message")
|
|
||||||
current_message["content"].append(current_block) # type: ignore[union-attr]
|
|
||||||
current_block = None
|
|
||||||
elif isinstance(response, RawMessageDeltaEvent):
|
elif isinstance(response, RawMessageDeltaEvent):
|
||||||
if (usage := response.usage) is not None:
|
if (usage := response.usage) is not None:
|
||||||
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
chat_log.async_trace(_create_token_stats(input_usage, usage))
|
||||||
if response.delta.stop_reason == "refusal":
|
if response.delta.stop_reason == "refusal":
|
||||||
raise HomeAssistantError("Potential policy violation detected")
|
raise HomeAssistantError("Potential policy violation detected")
|
||||||
elif isinstance(response, RawMessageStopEvent):
|
|
||||||
if current_message is not None:
|
|
-        messages.append(current_message)
-        current_message = None
-
-
 def _create_token_stats(
@@ -351,45 +354,48 @@ class AnthropicBaseLLMEntity(Entity):
         thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
         model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
 
+        model_args = MessageCreateParamsStreaming(
+            model=model,
+            messages=messages,
+            max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
+            system=system.content,
+            stream=True,
+        )
+        if tools:
+            model_args["tools"] = tools
+        if (
+            model.startswith(tuple(THINKING_MODELS))
+            and thinking_budget >= MIN_THINKING_BUDGET
+        ):
+            model_args["thinking"] = ThinkingConfigEnabledParam(
+                type="enabled", budget_tokens=thinking_budget
+            )
+        else:
+            model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
+            model_args["temperature"] = options.get(
+                CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
+            )
+
         # To prevent infinite loops, we limit the number of iterations
         for _iteration in range(MAX_TOOL_ITERATIONS):
-            model_args = {
-                "model": model,
-                "messages": messages,
-                "tools": tools or NOT_GIVEN,
-                "max_tokens": options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
-                "system": system.content,
-                "stream": True,
-            }
-            if model in THINKING_MODELS and thinking_budget >= MIN_THINKING_BUDGET:
-                model_args["thinking"] = ThinkingConfigEnabledParam(
-                    type="enabled", budget_tokens=thinking_budget
-                )
-            else:
-                model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
-                model_args["temperature"] = options.get(
-                    CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
-                )
-
             try:
                 stream = await client.messages.create(**model_args)
+
+                messages.extend(
+                    _convert_content(
+                        [
+                            content
+                            async for content in chat_log.async_add_delta_content_stream(
+                                self.entity_id,
+                                _transform_stream(chat_log, stream),
+                            )
+                        ]
+                    )
+                )
             except anthropic.AnthropicError as err:
                 raise HomeAssistantError(
                     f"Sorry, I had a problem talking to Anthropic: {err}"
                 ) from err
-
-            messages.extend(
-                _convert_content(
-                    [
-                        content
-                        async for content in chat_log.async_add_delta_content_stream(
-                            self.entity_id,
-                            _transform_stream(chat_log, stream, messages),
-                        )
-                        if not isinstance(content, conversation.AssistantContent)
-                    ]
-                )
-            )
 
             if not chat_log.unresponded_tool_results:
                 break
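The hunk above hoists request construction out of the tool loop (it is now built once as MessageCreateParamsStreaming) and moves stream consumption inside the try block, so a streaming failure surfaces as a single HomeAssistantError before any content is committed. A minimal, runnable sketch of that bounded tool-loop shape; call_model and has_pending_tool_results are hypothetical stand-ins, not the integration's API:

import asyncio

MAX_TOOL_ITERATIONS = 10  # mirrors the integration's loop bound

async def run_tool_loop(call_model, has_pending_tool_results, messages: list) -> list:
    """Bounded tool-call loop: stop once no tool results are outstanding."""
    for _iteration in range(MAX_TOOL_ITERATIONS):
        try:
            # Stream one model turn and extend the transcript with its output;
            # errors raised while streaming are wrapped before anything leaks out.
            messages.extend(await call_model(messages))
        except RuntimeError as err:
            raise RuntimeError(f"Problem talking to the model: {err}") from err
        if not has_pending_tool_results(messages):
            break
    return messages

async def _demo() -> None:
    async def call_model(msgs):
        return [{"role": "assistant", "content": "done"}]

    def has_pending_tool_results(msgs):
        return False  # no tool calls in this toy run

    print(await run_tool_loop(call_model, has_pending_tool_results, []))

asyncio.run(_demo())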
@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/anthropic",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["anthropic==0.52.0"]
+  "requirements": ["anthropic==0.62.0"]
 }
@@ -6,5 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/apcupsd",
   "iot_class": "local_polling",
   "loggers": ["apcaccess"],
+  "quality_scale": "bronze",
   "requirements": ["aioapcaccess==0.4.2"]
 }
homeassistant/components/apcupsd/quality_scale.yaml (new file, 93 lines)
@@ -0,0 +1,93 @@
+rules:
+  # Bronze
+  action-setup: done
+  appropriate-polling: done
+  brands: done
+  common-modules:
+    status: done
+    comment: |
+      Consider deriving a base entity.
+  config-flow-test-coverage: done
+  config-flow: done
+  dependency-transparency: done
+  docs-actions:
+    status: exempt
+    comment: |
+      The integration does not provide any actions.
+  docs-high-level-description: done
+  docs-installation-instructions: done
+  docs-removal-instructions: done
+  entity-event-setup:
+    status: exempt
+    comment: |
+      Entities of this integration does not explicitly subscribe to events.
+  entity-unique-id: done
+  has-entity-name: done
+  runtime-data: done
+  test-before-configure: done
+  test-before-setup: done
+  unique-config-entry: done
+  # Silver
+  action-exceptions:
+    status: exempt
+    comment: |
+      The integration does not provide any actions.
+  config-entry-unloading: done
+  docs-configuration-parameters:
+    status: exempt
+    comment: |
+      The integration does not provide any additional options.
+  docs-installation-parameters: done
+  entity-unavailable: done
+  integration-owner: done
+  log-when-unavailable: done
+  parallel-updates: done
+  reauthentication-flow:
+    status: exempt
+    comment: |
+      The integration does not require authentication.
+  test-coverage:
+    status: todo
+    comment: |
+      Patch `aioapcaccess.request_status` where we use it.
+  # Gold
+  devices: done
+  diagnostics: done
+  discovery-update-info:
+    status: exempt
+    comment: |
+      This integration cannot be discovered.
+  discovery:
+    status: exempt
+    comment: |
+      This integration cannot be discovered.
+  docs-data-update: done
+  docs-examples: done
+  docs-known-limitations: done
+  docs-supported-devices: done
+  docs-supported-functions: done
+  docs-troubleshooting: done
+  docs-use-cases: done
+  dynamic-devices:
+    status: exempt
+    comment: |
+      The integration connects to a single service per configuration entry.
+  entity-category: done
+  entity-device-class: done
+  entity-disabled-by-default: done
+  entity-translations: done
+  exception-translations: done
+  icon-translations: done
+  reconfiguration-flow: done
+  repair-issues: done
+  stale-devices:
+    status: exempt
+    comment: |
+      This integration connect to a single service per configuration entry.
+  # Platinum
+  async-dependency: done
+  inject-websession:
+    status: exempt
+    comment: |
+      The integration does not connect via HTTP.
+  strict-typing: done
@@ -14,7 +14,22 @@
         "host": "[%key:common::config_flow::data::host%]",
         "port": "[%key:common::config_flow::data::port%]"
       },
+      "data_description": {
+        "host": "The hostname or IP address of the APC UPS Daemon",
+        "port": "The port the APC UPS Daemon is listening on"
+      },
       "description": "Enter the host and port on which the apcupsd NIS is being served."
+    },
+    "reconfigure": {
+      "data": {
+        "host": "[%key:common::config_flow::data::host%]",
+        "port": "[%key:common::config_flow::data::port%]"
+      },
+      "data_description": {
+        "host": "[%key:component::apcupsd::config::step::user::data_description::host%]",
+        "port": "[%key:component::apcupsd::config::step::user::data_description::port%]"
+      },
+      "description": "[%key:component::apcupsd::config::step::user::description%]"
     }
   }
 },
@@ -11,7 +11,7 @@ import time
 from typing import Any, Literal, final
 
 from hassil import Intents, recognize
-from hassil.expression import Expression, ListReference, Sequence
+from hassil.expression import Expression, Group, ListReference
 from hassil.intents import WildcardSlotList
 
 from homeassistant.components import conversation, media_source, stt, tts
@@ -413,7 +413,7 @@ class AssistSatelliteEntity(entity.Entity):
         for intent in intents.intents.values():
             for intent_data in intent.data:
                 for sentence in intent_data.sentences:
-                    _collect_list_references(sentence, wildcard_names)
+                    _collect_list_references(sentence.expression, wildcard_names)
 
         for wildcard_name in wildcard_names:
            intents.slot_lists[wildcard_name] = WildcardSlotList(wildcard_name)
@@ -727,9 +727,9 @@ class AssistSatelliteEntity(entity.Entity):
 
 def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
     """Collect list reference names recursively."""
-    if isinstance(expression, Sequence):
-        seq: Sequence = expression
-        for item in seq.items:
+    if isinstance(expression, Group):
+        grp: Group = expression
+        for item in grp.items:
             _collect_list_references(item, list_names)
     elif isinstance(expression, ListReference):
         # {list}
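The hassil 3.x migration above only swaps Sequence for Group and walks sentence.expression; the recursive collection itself is unchanged. A runnable sketch of that depth-first walk, using plain stand-in classes rather than hassil's real types:

from dataclasses import dataclass, field

@dataclass
class ListRef:
    list_name: str  # stand-in for hassil's ListReference

@dataclass
class Group:
    items: list = field(default_factory=list)

def collect_list_references(expression, list_names: set[str]) -> None:
    """Collect list reference names recursively, depth-first."""
    if isinstance(expression, Group):
        for item in expression.items:
            collect_list_references(item, list_names)
    elif isinstance(expression, ListRef):
        list_names.add(expression.list_name)

names: set[str] = set()
collect_list_references(Group([ListRef("area"), Group([ListRef("name")])]), names)
assert names == {"area", "name"}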
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/assist_satellite",
   "integration_type": "entity",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.2.3"]
+  "requirements": ["hassil==3.1.0"]
 }
@@ -5,15 +5,16 @@ from __future__ import annotations
 from abc import ABC, abstractmethod
 from collections import namedtuple
 from collections.abc import Awaitable, Callable, Coroutine
-from datetime import datetime
 import functools
 import logging
 from typing import Any, cast
 
 from aioasuswrt.asuswrt import AsusWrt as AsusWrtLegacy
 from aiohttp import ClientSession
-from pyasuswrt import AsusWrtError, AsusWrtHttp
-from pyasuswrt.exceptions import AsusWrtNotAvailableInfoError
+from asusrouter import AsusRouter, AsusRouterError
+from asusrouter.modules.client import AsusClient
+from asusrouter.modules.data import AsusData
+from asusrouter.modules.homeassistant import convert_to_ha_data, convert_to_ha_sensors
 
 from homeassistant.const import (
     CONF_HOST,
@@ -41,14 +42,13 @@ from .const import (
     PROTOCOL_HTTPS,
     PROTOCOL_TELNET,
     SENSORS_BYTES,
-    SENSORS_CPU,
     SENSORS_LOAD_AVG,
     SENSORS_MEMORY,
     SENSORS_RATES,
-    SENSORS_TEMPERATURES,
     SENSORS_TEMPERATURES_LEGACY,
     SENSORS_UPTIME,
 )
+from .helpers import clean_dict, translate_to_legacy
 
 SENSORS_TYPE_BYTES = "sensors_bytes"
 SENSORS_TYPE_COUNT = "sensors_count"
@@ -310,16 +310,16 @@ class AsusWrtHttpBridge(AsusWrtBridge):
     def __init__(self, conf: dict[str, Any], session: ClientSession) -> None:
         """Initialize Bridge that use HTTP library."""
         super().__init__(conf[CONF_HOST])
-        self._api: AsusWrtHttp = self._get_api(conf, session)
+        self._api = self._get_api(conf, session)
 
     @staticmethod
-    def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusWrtHttp:
-        """Get the AsusWrtHttp API."""
-        return AsusWrtHttp(
-            conf[CONF_HOST],
-            conf[CONF_USERNAME],
-            conf.get(CONF_PASSWORD, ""),
-            use_https=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
+    def _get_api(conf: dict[str, Any], session: ClientSession) -> AsusRouter:
+        """Get the AsusRouter API."""
+        return AsusRouter(
+            hostname=conf[CONF_HOST],
+            username=conf[CONF_USERNAME],
+            password=conf.get(CONF_PASSWORD, ""),
+            use_ssl=conf[CONF_PROTOCOL] == PROTOCOL_HTTPS,
             port=conf.get(CONF_PORT),
             session=session,
         )
@@ -327,46 +327,90 @@ class AsusWrtHttpBridge(AsusWrtBridge):
     @property
     def is_connected(self) -> bool:
         """Get connected status."""
-        return cast(bool, self._api.is_connected)
+        return self._api.connected
 
     async def async_connect(self) -> None:
         """Connect to the device."""
         await self._api.async_connect()
 
+        # Collect the identity
+        _identity = await self._api.async_get_identity()
+
         # get main router properties
-        if mac := self._api.mac:
+        if mac := _identity.mac:
             self._label_mac = format_mac(mac)
-        self._firmware = self._api.firmware
-        self._model = self._api.model
+        self._firmware = str(_identity.firmware)
+        self._model = _identity.model
 
     async def async_disconnect(self) -> None:
         """Disconnect to the device."""
         await self._api.async_disconnect()
 
+    async def _get_data(
+        self,
+        datatype: AsusData,
+        force: bool = False,
+    ) -> dict[str, Any]:
+        """Get data from the device.
+
+        This is a generic method which automatically converts to
+        the Home Assistant-compatible format.
+        """
+        try:
+            raw = await self._api.async_get_data(datatype, force=force)
+            return translate_to_legacy(clean_dict(convert_to_ha_data(raw)))
+        except AsusRouterError as ex:
+            raise UpdateFailed(ex) from ex
+
+    async def _get_sensors(self, datatype: AsusData) -> list[str]:
+        """Get the available sensors.
+
+        This is a generic method which automatically converts to
+        the Home Assistant-compatible format.
+        """
+        sensors = []
+        try:
+            data = await self._api.async_get_data(datatype)
+            # Get the list of sensors from the raw data
+            # and translate in to the legacy format
+            sensors = translate_to_legacy(convert_to_ha_sensors(data, datatype))
+            _LOGGER.debug("Available `%s` sensors: %s", datatype.value, sensors)
+        except AsusRouterError as ex:
+            _LOGGER.warning(
+                "Cannot get available `%s` sensors with exception: %s",
+                datatype.value,
+                ex,
+            )
+        return sensors
+
     async def async_get_connected_devices(self) -> dict[str, WrtDevice]:
         """Get list of connected devices."""
-        api_devices = await self._api.async_get_connected_devices()
+        api_devices: dict[str, AsusClient] = await self._api.async_get_data(
+            AsusData.CLIENTS, force=True
+        )
         return {
-            format_mac(mac): WrtDevice(dev.ip, dev.name, dev.node)
+            format_mac(mac): WrtDevice(
+                dev.connection.ip_address, dev.description.name, dev.connection.node
+            )
             for mac, dev in api_devices.items()
+            if dev.connection is not None
+            and dev.description is not None
+            and dev.connection.ip_address is not None
         }
 
     async def async_get_available_sensors(self) -> dict[str, dict[str, Any]]:
         """Return a dictionary of available sensors for this bridge."""
-        sensors_cpu = await self._get_available_cpu_sensors()
-        sensors_temperatures = await self._get_available_temperature_sensors()
-        sensors_loadavg = await self._get_loadavg_sensors_availability()
         return {
             SENSORS_TYPE_BYTES: {
                 KEY_SENSORS: SENSORS_BYTES,
                 KEY_METHOD: self._get_bytes,
             },
             SENSORS_TYPE_CPU: {
-                KEY_SENSORS: sensors_cpu,
+                KEY_SENSORS: await self._get_sensors(AsusData.CPU),
                 KEY_METHOD: self._get_cpu_usage,
             },
             SENSORS_TYPE_LOAD_AVG: {
-                KEY_SENSORS: sensors_loadavg,
+                KEY_SENSORS: await self._get_sensors(AsusData.SYSINFO),
                 KEY_METHOD: self._get_load_avg,
             },
             SENSORS_TYPE_MEMORY: {
@@ -382,95 +426,44 @@ class AsusWrtHttpBridge(AsusWrtBridge):
                 KEY_METHOD: self._get_uptime,
             },
             SENSORS_TYPE_TEMPERATURES: {
-                KEY_SENSORS: sensors_temperatures,
+                KEY_SENSORS: await self._get_sensors(AsusData.TEMPERATURE),
                 KEY_METHOD: self._get_temperatures,
             },
         }
 
-    async def _get_available_cpu_sensors(self) -> list[str]:
-        """Check which cpu information is available on the router."""
-        try:
-            available_cpu = await self._api.async_get_cpu_usage()
-            available_sensors = [t for t in SENSORS_CPU if t in available_cpu]
-        except AsusWrtError as exc:
-            _LOGGER.warning(
-                (
-                    "Failed checking cpu sensor availability for ASUS router"
-                    " %s. Exception: %s"
-                ),
-                self.host,
-                exc,
-            )
-            return []
-        return available_sensors
-
-    async def _get_available_temperature_sensors(self) -> list[str]:
-        """Check which temperature information is available on the router."""
-        try:
-            available_temps = await self._api.async_get_temperatures()
-            available_sensors = [
-                t for t in SENSORS_TEMPERATURES if t in available_temps
-            ]
-        except AsusWrtError as exc:
-            _LOGGER.warning(
-                (
-                    "Failed checking temperature sensor availability for ASUS router"
-                    " %s. Exception: %s"
-                ),
-                self.host,
-                exc,
-            )
-            return []
-        return available_sensors
-
-    async def _get_loadavg_sensors_availability(self) -> list[str]:
-        """Check if load avg is available on the router."""
-        try:
-            await self._api.async_get_loadavg()
-        except AsusWrtNotAvailableInfoError:
-            return []
-        except AsusWrtError:
-            pass
-        return SENSORS_LOAD_AVG
-
-    @handle_errors_and_zip(AsusWrtError, SENSORS_BYTES)
     async def _get_bytes(self) -> Any:
         """Fetch byte information from the router."""
-        return await self._api.async_get_traffic_bytes()
+        return await self._get_data(AsusData.NETWORK)
 
-    @handle_errors_and_zip(AsusWrtError, SENSORS_RATES)
     async def _get_rates(self) -> Any:
         """Fetch rates information from the router."""
-        return await self._api.async_get_traffic_rates()
+        data = await self._get_data(AsusData.NETWORK)
+        # Convert from bits/s to Bytes/s for compatibility with legacy sensors
+        return {
+            key: (
+                value / 8
+                if key in SENSORS_RATES and isinstance(value, (int, float))
+                else value
+            )
+            for key, value in data.items()
+        }
 
-    @handle_errors_and_zip(AsusWrtError, SENSORS_LOAD_AVG)
     async def _get_load_avg(self) -> Any:
         """Fetch cpu load avg information from the router."""
-        return await self._api.async_get_loadavg()
+        return await self._get_data(AsusData.SYSINFO)
 
-    @handle_errors_and_zip(AsusWrtError, None)
     async def _get_temperatures(self) -> Any:
         """Fetch temperatures information from the router."""
-        return await self._api.async_get_temperatures()
+        return await self._get_data(AsusData.TEMPERATURE)
 
-    @handle_errors_and_zip(AsusWrtError, None)
     async def _get_cpu_usage(self) -> Any:
         """Fetch cpu information from the router."""
-        return await self._api.async_get_cpu_usage()
+        return await self._get_data(AsusData.CPU)
 
-    @handle_errors_and_zip(AsusWrtError, None)
     async def _get_memory_usage(self) -> Any:
         """Fetch memory information from the router."""
-        return await self._api.async_get_memory_usage()
+        return await self._get_data(AsusData.RAM)
 
     async def _get_uptime(self) -> dict[str, Any]:
         """Fetch uptime from the router."""
-        try:
-            uptimes = await self._api.async_get_uptime()
-        except AsusWrtError as exc:
-            raise UpdateFailed(exc) from exc
-
-        last_boot = datetime.fromisoformat(uptimes["last_boot"])
-        uptime = uptimes["uptime"]
-
-        return dict(zip(SENSORS_UPTIME, [last_boot, uptime], strict=False))
+        return await self._get_data(AsusData.BOOTTIME)
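One behavioral detail worth noticing in the bridge rewrite above: _get_rates now divides the reported values by 8, because (per the hunk's own comment) the new library reports network rates in bits/s while the legacy sensors expect Bytes/s. That compatibility shim in isolation, with illustrative key names:

SENSORS_RATES = {"sensor_rx_rates", "sensor_tx_rates"}

def to_legacy_rates(data: dict) -> dict:
    """Convert bits/s to Bytes/s for the legacy rate sensors only."""
    return {
        key: (
            value / 8
            if key in SENSORS_RATES and isinstance(value, (int, float))
            else value
        )
        for key, value in data.items()
    }

print(to_legacy_rates({"sensor_rx_rates": 8000, "model": "RT-AX88U"}))
# -> {'sensor_rx_rates': 1000.0, 'model': 'RT-AX88U'}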
@@ -7,7 +7,7 @@ import os
 import socket
 from typing import Any, cast
 
-from pyasuswrt import AsusWrtError
+from asusrouter import AsusRouterError
 import voluptuous as vol
 
 from homeassistant.components.device_tracker import (
@@ -189,7 +189,7 @@ class AsusWrtFlowHandler(ConfigFlow, domain=DOMAIN):
         try:
             await api.async_connect()
 
-        except (AsusWrtError, OSError):
+        except (AsusRouterError, OSError):
             _LOGGER.error(
                 "Error connecting to the AsusWrt router at %s using protocol %s",
                 host,
homeassistant/components/asuswrt/helpers.py (new file, 56 lines)
@@ -0,0 +1,56 @@
+"""Helpers for AsusWRT integration."""
+
+from __future__ import annotations
+
+from typing import Any, TypeVar
+
+T = TypeVar("T", dict[str, Any], list[Any], None)
+
+TRANSLATION_MAP = {
+    "wan_rx": "sensor_rx_bytes",
+    "wan_tx": "sensor_tx_bytes",
+    "total_usage": "cpu_total_usage",
+    "usage": "mem_usage_perc",
+    "free": "mem_free",
+    "used": "mem_used",
+    "wan_rx_speed": "sensor_rx_rates",
+    "wan_tx_speed": "sensor_tx_rates",
+    "2ghz": "2.4GHz",
+    "5ghz": "5.0GHz",
+    "5ghz2": "5.0GHz_2",
+    "6ghz": "6.0GHz",
+    "cpu": "CPU",
+    "datetime": "sensor_last_boot",
+    "uptime": "sensor_uptime",
+    **{f"{num}_usage": f"cpu{num}_usage" for num in range(1, 9)},
+    **{f"load_avg_{load}": f"sensor_load_avg{load}" for load in ("1", "5", "15")},
+}
+
+
+def clean_dict(raw: dict[str, Any]) -> dict[str, Any]:
+    """Cleans dictionary from None values.
+
+    The `state` key is always preserved regardless of its value.
+    """
+
+    return {k: v for k, v in raw.items() if v is not None or k.endswith("state")}
+
+
+def translate_to_legacy(raw: T) -> T:
+    """Translate raw data to legacy format for dicts and lists."""
+
+    if raw is None:
+        return None
+
+    if isinstance(raw, dict):
+        return {TRANSLATION_MAP.get(k, k): v for k, v in raw.items()}
+
+    if isinstance(raw, list):
+        return [
+            TRANSLATION_MAP[item]
+            if isinstance(item, str) and item in TRANSLATION_MAP
+            else item
+            for item in raw
+        ]
+
+    return raw
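Since helpers.py is a new file, a short usage example may help; this applies exactly the two helpers above to a toy payload (it requires Home Assistant to be installed, and the keys come straight from TRANSLATION_MAP):

from homeassistant.components.asuswrt.helpers import clean_dict, translate_to_legacy

raw = {"wan_rx": 1073741824, "wan_tx": None, "link_state": None, "2ghz": "up"}

# None values are dropped, except keys ending in "state":
cleaned = clean_dict(raw)
assert cleaned == {"wan_rx": 1073741824, "link_state": None, "2ghz": "up"}

# Keys are mapped to the legacy sensor names; unknown keys pass through:
legacy = translate_to_legacy(cleaned)
assert legacy == {"sensor_rx_bytes": 1073741824, "link_state": None, "2.4GHz": "up"}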
@@ -1,11 +1,11 @@
 {
   "domain": "asuswrt",
   "name": "ASUSWRT",
-  "codeowners": ["@kennedyshead", "@ollo69"],
+  "codeowners": ["@kennedyshead", "@ollo69", "@Vaskivskyi"],
   "config_flow": true,
   "documentation": "https://www.home-assistant.io/integrations/asuswrt",
   "integration_type": "hub",
   "iot_class": "local_polling",
-  "loggers": ["aioasuswrt", "asyncssh"],
-  "requirements": ["aioasuswrt==1.4.0", "pyasuswrt==0.1.21"]
+  "loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
+  "requirements": ["aioasuswrt==1.4.0", "asusrouter==1.19.0"]
 }
@@ -5,9 +5,9 @@ from __future__ import annotations
 from collections.abc import Callable, Mapping
 from datetime import datetime, timedelta
 import logging
-from typing import Any
+from typing import TYPE_CHECKING, Any
 
-from pyasuswrt import AsusWrtError
+from asusrouter import AsusRouterError
 
 from homeassistant.components.device_tracker import (
     CONF_CONSIDER_HOME,
@@ -40,6 +40,9 @@ from .const import (
     SENSORS_CONNECTED_DEVICE,
 )
 
+if TYPE_CHECKING:
+    from . import AsusWrtConfigEntry
+
 CONF_REQ_RELOAD = [CONF_DNSMASQ, CONF_INTERFACE, CONF_REQUIRE_IP]
 
 SCAN_INTERVAL = timedelta(seconds=30)
@@ -52,10 +55,13 @@ _LOGGER = logging.getLogger(__name__)
 class AsusWrtSensorDataHandler:
     """Data handler for AsusWrt sensor."""
 
-    def __init__(self, hass: HomeAssistant, api: AsusWrtBridge) -> None:
+    def __init__(
+        self, hass: HomeAssistant, api: AsusWrtBridge, entry: AsusWrtConfigEntry
+    ) -> None:
         """Initialize a AsusWrt sensor data handler."""
         self._hass = hass
         self._api = api
+        self._entry = entry
         self._connected_devices = 0
 
     async def _get_connected_devices(self) -> dict[str, int]:
@@ -91,6 +97,7 @@ class AsusWrtSensorDataHandler:
             update_method=method,
             # Polling interval. Will only be polled if there are subscribers.
             update_interval=SCAN_INTERVAL if should_poll else None,
+            config_entry=self._entry,
         )
         await coordinator.async_refresh()
 
@@ -222,7 +229,7 @@ class AsusWrtRouter:
         """Set up a AsusWrt router."""
         try:
             await self._api.async_connect()
-        except (AsusWrtError, OSError) as exc:
+        except (AsusRouterError, OSError) as exc:
             raise ConfigEntryNotReady from exc
         if not self._api.is_connected:
             raise ConfigEntryNotReady
@@ -277,7 +284,7 @@ class AsusWrtRouter:
         _LOGGER.debug("Checking devices for ASUS router %s", self.host)
         try:
             wrt_devices = await self._api.async_get_connected_devices()
-        except (OSError, AsusWrtError) as exc:
+        except (OSError, AsusRouterError) as exc:
             if not self._connect_error:
                 self._connect_error = True
                 _LOGGER.error(
@@ -321,7 +328,9 @@ class AsusWrtRouter:
         if self._sensors_data_handler:
             return
 
-        self._sensors_data_handler = AsusWrtSensorDataHandler(self.hass, self._api)
+        self._sensors_data_handler = AsusWrtSensorDataHandler(
+            self.hass, self._api, self._entry
+        )
         self._sensors_data_handler.update_device_count(self._connected_devices)
 
         sensors_types = await self._api.async_get_available_sensors()
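Besides the mechanical AsusWrtError-to-AsusRouterError renames, the router.py hunks above add a TYPE_CHECKING-guarded import for AsusWrtConfigEntry: the name is needed only in annotations, and importing it at runtime from the package root would be circular. The standard shape of that pattern, as a self-contained sketch (the imported name here is the one from the hunk, treated as hypothetical):

from __future__ import annotations

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Only evaluated by type checkers, so no circular import at runtime.
    from . import AsusWrtConfigEntry

class Handler:
    def __init__(self, entry: AsusWrtConfigEntry) -> None:
        # The annotation stays a string thanks to `from __future__ import
        # annotations`, so this class works even though the import never ran.
        self._entry = entry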
@@ -28,5 +28,5 @@
   "documentation": "https://www.home-assistant.io/integrations/august",
   "iot_class": "cloud_push",
   "loggers": ["pubnub", "yalexs"],
-  "requirements": ["yalexs==8.10.0", "yalexs-ble==3.1.0"]
+  "requirements": ["yalexs==8.11.1", "yalexs-ble==3.1.2"]
 }
@@ -5,6 +5,7 @@ from __future__ import annotations
 from datetime import timedelta
 import logging
 
+API_ABS_HUMID = "abs_humid"
 API_CO2 = "carbon_dioxide"
 API_DEW_POINT = "dew_point"
 API_DUST = "dust"
@@ -18,6 +18,7 @@ from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
     ATTR_CONNECTIONS,
     ATTR_SW_VERSION,
+    CONCENTRATION_GRAMS_PER_CUBIC_METER,
     CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
     CONCENTRATION_PARTS_PER_BILLION,
     CONCENTRATION_PARTS_PER_MILLION,
@@ -33,6 +34,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 
 from .const import (
+    API_ABS_HUMID,
     API_CO2,
     API_DEW_POINT,
     API_DUST,
@@ -120,6 +122,14 @@ SENSOR_TYPES: tuple[AwairSensorEntityDescription, ...] = (
         state_class=SensorStateClass.MEASUREMENT,
         entity_registry_enabled_default=False,
     ),
+    AwairSensorEntityDescription(
+        key=API_ABS_HUMID,
+        device_class=SensorDeviceClass.ABSOLUTE_HUMIDITY,
+        native_unit_of_measurement=CONCENTRATION_GRAMS_PER_CUBIC_METER,
+        unique_id_tag="absolute_humidity",
+        state_class=SensorStateClass.MEASUREMENT,
+        entity_registry_enabled_default=False,
+    ),
 )
 
 SENSOR_TYPES_DUST: tuple[AwairSensorEntityDescription, ...] = (
@@ -29,7 +29,7 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["axis"],
-  "requirements": ["axis==64"],
+  "requirements": ["axis==65"],
   "ssdp": [
     {
       "manufacturer": "AXIS"
@@ -127,7 +127,6 @@ class BackupConfigData:
         schedule=BackupSchedule(
             days=days,
             recurrence=ScheduleRecurrence(data["schedule"]["recurrence"]),
-            state=ScheduleState(data["schedule"].get("state", ScheduleState.NEVER)),
             time=time,
         ),
     )
@@ -453,7 +452,6 @@ class StoredBackupSchedule(TypedDict):
 
     days: list[Day]
     recurrence: ScheduleRecurrence
-    state: ScheduleState
     time: str | None
 
 
@@ -462,7 +460,6 @@ class ScheduleParametersDict(TypedDict, total=False):
 
     days: list[Day]
     recurrence: ScheduleRecurrence
-    state: ScheduleState
     time: dt.time | None
 
 
@@ -486,32 +483,12 @@ class ScheduleRecurrence(StrEnum):
     CUSTOM_DAYS = "custom_days"
 
 
-class ScheduleState(StrEnum):
-    """Represent the schedule recurrence.
-
-    This is deprecated and can be remove in HA Core 2025.8.
-    """
-
-    NEVER = "never"
-    DAILY = "daily"
-    MONDAY = "mon"
-    TUESDAY = "tue"
-    WEDNESDAY = "wed"
-    THURSDAY = "thu"
-    FRIDAY = "fri"
-    SATURDAY = "sat"
-    SUNDAY = "sun"
-
-
 @dataclass(kw_only=True)
 class BackupSchedule:
     """Represent the backup schedule."""
 
     days: list[Day] = field(default_factory=list)
     recurrence: ScheduleRecurrence = ScheduleRecurrence.NEVER
-    # Although no longer used, state is kept for backwards compatibility.
-    # It can be removed in HA Core 2025.8.
-    state: ScheduleState = ScheduleState.NEVER
     time: dt.time | None = None
     cron_event: CronSim | None = field(init=False, default=None)
     next_automatic_backup: datetime | None = field(init=False, default=None)
@@ -610,7 +587,6 @@ class BackupSchedule:
         return StoredBackupSchedule(
             days=self.days,
             recurrence=self.recurrence,
-            state=self.state,
             time=self.time.isoformat() if self.time else None,
         )
 
@@ -331,9 +331,6 @@ async def handle_config_info(
     """Send the stored backup config."""
     manager = hass.data[DATA_MANAGER]
     config = manager.config.data.to_dict()
-    # Remove state from schedule, it's not needed in the frontend
-    # mypy doesn't like deleting from TypedDict, ignore it
-    del config["schedule"]["state"]  # type: ignore[misc]
     connection.send_result(
         msg["id"],
         {
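The removal above is safe because the stored "state" key was already read defensively (loading tolerated its absence, and the websocket handler stripped it before sending), so once nothing consumes it the field can be dropped from the TypedDicts, the dataclass, and to_dict() in one sweep. A toy sketch of retiring a stored field this way; all names are illustrative, not the backup integration's real API:

from typing import TypedDict

class StoredSchedule(TypedDict):
    days: list[str]
    recurrence: str
    time: str | None

def load_schedule(data: dict) -> StoredSchedule:
    """Older installs may still carry a legacy 'state' key; ignore it."""
    return StoredSchedule(
        days=data.get("days", []),
        recurrence=data["recurrence"],
        time=data.get("time"),
    )

old_payload = {"days": ["mon"], "recurrence": "custom_days", "state": "never", "time": None}
print(load_schedule(old_payload))  # 'state' is silently dropped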
@@ -25,7 +25,6 @@ SERVICE_TRIGGER = "trigger_camera"
 SERVICE_SAVE_VIDEO = "save_video"
 SERVICE_SAVE_RECENT_CLIPS = "save_recent_clips"
 SERVICE_SEND_PIN = "send_pin"
-ATTR_CONFIG_ENTRY_ID = "config_entry_id"
 
 PLATFORMS = [
     Platform.ALARM_CONTROL_PANEL,
@@ -5,12 +5,12 @@ from __future__ import annotations
 import voluptuous as vol
 
 from homeassistant.config_entries import ConfigEntryState
-from homeassistant.const import CONF_PIN
+from homeassistant.const import ATTR_CONFIG_ENTRY_ID, CONF_PIN
 from homeassistant.core import HomeAssistant, ServiceCall, callback
 from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
 from homeassistant.helpers import config_validation as cv
 
-from .const import ATTR_CONFIG_ENTRY_ID, DOMAIN, SERVICE_SEND_PIN
+from .const import DOMAIN, SERVICE_SEND_PIN
 from .coordinator import BlinkConfigEntry
 
 SERVICE_SEND_PIN_SCHEMA = vol.Schema(
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/blue_current",
   "iot_class": "cloud_push",
   "loggers": ["bluecurrent_api"],
-  "requirements": ["bluecurrent-api==1.2.4"]
+  "requirements": ["bluecurrent-api==1.3.1"]
 }
@@ -388,12 +388,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     mode = BluetoothScanningMode.PASSIVE if passive else BluetoothScanningMode.ACTIVE
     scanner = HaScanner(mode, adapter, address)
     scanner.async_setup()
-    try:
-        await scanner.async_start()
-    except (RuntimeError, ScannerStartError) as err:
-        raise ConfigEntryNotReady(
-            f"{adapter_human_name(adapter, address)}: {err}"
-        ) from err
     adapters = await manager.async_get_bluetooth_adapters()
     details = adapters[adapter]
     if entry.title == address:
@@ -401,8 +395,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             entry, title=adapter_title(adapter, details)
         )
     slots: int = details.get(ADAPTER_CONNECTION_SLOTS) or DEFAULT_CONNECTION_SLOTS
+    # Register the scanner before starting so
+    # any raw advertisement data can be processed
     entry.async_on_unload(async_register_scanner(hass, scanner, connection_slots=slots))
     await async_update_device(hass, entry, adapter, details)
+    try:
+        await scanner.async_start()
+    except (RuntimeError, ScannerStartError) as err:
+        raise ConfigEntryNotReady(
+            f"{adapter_human_name(adapter, address)}: {err}"
+        ) from err
     entry.async_on_unload(entry.add_update_listener(async_update_listener))
     entry.async_on_unload(scanner.async_stop)
     return True
|
|||||||
|
|
||||||
def _async_save_scanner_history(self, scanner: BaseHaScanner) -> None:
|
def _async_save_scanner_history(self, scanner: BaseHaScanner) -> None:
|
||||||
"""Save the scanner history."""
|
"""Save the scanner history."""
|
||||||
if isinstance(scanner, BaseHaRemoteScanner):
|
self.storage.async_set_advertisement_history(
|
||||||
self.storage.async_set_advertisement_history(
|
scanner.source, scanner.serialize_discovered_devices()
|
||||||
scanner.source, scanner.serialize_discovered_devices()
|
)
|
||||||
)
|
|
||||||
|
|
||||||
def _async_unregister_scanner(
|
def _async_unregister_scanner(
|
||||||
self, scanner: BaseHaScanner, unregister: CALLBACK_TYPE
|
self, scanner: BaseHaScanner, unregister: CALLBACK_TYPE
|
||||||
@@ -285,9 +284,8 @@ class HomeAssistantBluetoothManager(BluetoothManager):
|
|||||||
connection_slots: int | None = None,
|
connection_slots: int | None = None,
|
||||||
) -> CALLBACK_TYPE:
|
) -> CALLBACK_TYPE:
|
||||||
"""Register a scanner."""
|
"""Register a scanner."""
|
||||||
if isinstance(scanner, BaseHaRemoteScanner):
|
if history := self.storage.async_get_advertisement_history(scanner.source):
|
||||||
if history := self.storage.async_get_advertisement_history(scanner.source):
|
scanner.restore_discovered_devices(history)
|
||||||
scanner.restore_discovered_devices(history)
|
|
||||||
|
|
||||||
unregister = super().async_register_scanner(scanner, connection_slots)
|
unregister = super().async_register_scanner(scanner, connection_slots)
|
||||||
return partial(self._async_unregister_scanner, scanner, unregister)
|
return partial(self._async_unregister_scanner, scanner, unregister)
|
||||||
|
|||||||
@@ -16,11 +16,11 @@
|
|||||||
"quality_scale": "internal",
|
"quality_scale": "internal",
|
||||||
"requirements": [
|
"requirements": [
|
||||||
"bleak==1.0.1",
|
"bleak==1.0.1",
|
||||||
"bleak-retry-connector==4.0.0",
|
"bleak-retry-connector==4.0.1",
|
||||||
"bluetooth-adapters==2.0.0",
|
"bluetooth-adapters==2.0.0",
|
||||||
"bluetooth-auto-recovery==1.5.2",
|
"bluetooth-auto-recovery==1.5.2",
|
||||||
"bluetooth-data-tools==1.28.2",
|
"bluetooth-data-tools==1.28.2",
|
||||||
"dbus-fast==2.44.2",
|
"dbus-fast==2.44.3",
|
||||||
"habluetooth==4.0.1"
|
"habluetooth==5.0.1"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -39,7 +39,13 @@ def async_setup(hass: HomeAssistant) -> None:
|
|||||||
def serialize_service_info(
|
def serialize_service_info(
|
||||||
service_info: BluetoothServiceInfoBleak, time_diff: float
|
service_info: BluetoothServiceInfoBleak, time_diff: float
|
||||||
) -> dict[str, Any]:
|
) -> dict[str, Any]:
|
||||||
"""Serialize a BluetoothServiceInfoBleak object."""
|
"""Serialize a BluetoothServiceInfoBleak object.
|
||||||
|
|
||||||
|
The raw field is included for:
|
||||||
|
1. Debugging - to see the actual advertisement packet
|
||||||
|
2. Data freshness - manufacturer_data and service_data are aggregated
|
||||||
|
across multiple advertisements, raw shows the latest packet only
|
||||||
|
"""
|
||||||
return {
|
return {
|
||||||
"name": service_info.name,
|
"name": service_info.name,
|
||||||
"address": service_info.address,
|
"address": service_info.address,
|
||||||
@@ -57,6 +63,7 @@ def serialize_service_info(
|
|||||||
"connectable": service_info.connectable,
|
"connectable": service_info.connectable,
|
||||||
"time": service_info.time + time_diff,
|
"time": service_info.time + time_diff,
|
||||||
"tx_power": service_info.tx_power,
|
"tx_power": service_info.tx_power,
|
||||||
|
"raw": service_info.raw.hex() if service_info.raw else None,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
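The new "raw" field above carries the latest advertisement packet as a hex string, with a None guard since not every backend supplies raw packets. The guard in isolation:

def serialize_raw(raw: bytes | None) -> str | None:
    """Hex-encode the latest advertisement packet, if the backend provided one."""
    return raw.hex() if raw else None

assert serialize_raw(b"\x02\x01\x06") == "020106"
assert serialize_raw(None) is None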
@@ -6,4 +6,3 @@ CONF_INSTALLER_CODE = "installer_code"
 CONF_USER_CODE = "user_code"
 ATTR_DATETIME = "datetime"
 SERVICE_SET_DATE_TIME = "set_date_time"
-ATTR_CONFIG_ENTRY_ID = "config_entry_id"
@@ -9,12 +9,13 @@ from typing import Any
 import voluptuous as vol
 
 from homeassistant.config_entries import ConfigEntryState
+from homeassistant.const import ATTR_CONFIG_ENTRY_ID
 from homeassistant.core import HomeAssistant, ServiceCall, callback
 from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
 from homeassistant.helpers import config_validation as cv
 from homeassistant.util import dt as dt_util
 
-from .const import ATTR_CONFIG_ENTRY_ID, ATTR_DATETIME, DOMAIN, SERVICE_SET_DATE_TIME
+from .const import ATTR_DATETIME, DOMAIN, SERVICE_SET_DATE_TIME
 from .types import BoschAlarmConfigEntry
 
 
@@ -95,7 +95,7 @@
       "name": "Battery missing"
     },
     "panel_fault_ac_fail": {
-      "name": "AC Failure"
+      "name": "AC failure"
     },
     "panel_fault_parameter_crc_fail_in_pif": {
       "name": "CRC failure in panel configuration"
@@ -69,12 +69,7 @@ class SHCEntity(SHCBaseEntity):
             manufacturer=device.manufacturer,
             model=device.device_model,
             name=device.name,
-            via_device=(
-                DOMAIN,
-                device.parent_device_id
-                if device.parent_device_id is not None
-                else parent_id,
-            ),
+            via_device=(DOMAIN, device.root_device_id),
         )
         super().__init__(device=device, parent_id=parent_id, entry_id=entry_id)
 
@@ -7,7 +7,7 @@
   "documentation": "https://www.home-assistant.io/integrations/bosch_shc",
   "iot_class": "local_push",
   "loggers": ["boschshcpy"],
-  "requirements": ["boschshcpy==0.2.91"],
+  "requirements": ["boschshcpy==0.2.107"],
   "zeroconf": [
     {
       "type": "_http._tcp.local.",
@@ -53,8 +53,7 @@ async def async_setup_entry(
     assert unique_id is not None
 
     async_add_entities(
-        BraviaTVButton(coordinator, unique_id, config_entry.title, description)
-        for description in BUTTONS
+        BraviaTVButton(coordinator, unique_id, description) for description in BUTTONS
     )
 
 
@@ -67,11 +66,10 @@ class BraviaTVButton(BraviaTVEntity, ButtonEntity):
         self,
         coordinator: BraviaTVCoordinator,
         unique_id: str,
-        model: str,
         description: BraviaTVButtonDescription,
     ) -> None:
         """Initialize the button."""
-        super().__init__(coordinator, unique_id, model)
+        super().__init__(coordinator, unique_id)
         self._attr_unique_id = f"{unique_id}_{description.key}"
         self.entity_description = description
 
@@ -79,14 +79,16 @@ class BraviaTVConfigFlow(ConfigFlow, domain=DOMAIN):
 
         system_info = await self.client.get_system_info()
         cid = system_info[ATTR_CID].lower()
-        title = system_info[ATTR_MODEL]
 
         self.device_config[CONF_MAC] = system_info[ATTR_MAC]
 
         await self.async_set_unique_id(cid)
         self._abort_if_unique_id_configured()
 
-        return self.async_create_entry(title=title, data=self.device_config)
+        return self.async_create_entry(
+            title=f"{system_info['name']} {system_info[ATTR_MODEL]}",
+            data=self.device_config,
+        )
 
     async def async_reauth_device(self) -> ConfigFlowResult:
         """Reauthorize Bravia TV device from config."""
@@ -81,6 +81,7 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
         self.use_psk = config_entry.data.get(CONF_USE_PSK, False)
         self.client_id = config_entry.data.get(CONF_CLIENT_ID, LEGACY_CLIENT_ID)
         self.nickname = config_entry.data.get(CONF_NICKNAME, NICKNAME_PREFIX)
+        self.system_info: dict[str, str] = {}
         self.source: str | None = None
         self.source_list: list[str] = []
         self.source_map: dict[str, dict] = {}
@@ -150,6 +151,9 @@ class BraviaTVCoordinator(DataUpdateCoordinator[None]):
         self.is_on = power_status == "active"
         self.skipped_updates = 0
 
+        if not self.system_info:
+            self.system_info = await self.client.get_system_info()
+
         if self.is_on is False:
             return
 
@@ -12,23 +12,16 @@ class BraviaTVEntity(CoordinatorEntity[BraviaTVCoordinator]):
 
     _attr_has_entity_name = True
 
-    def __init__(
-        self,
-        coordinator: BraviaTVCoordinator,
-        unique_id: str,
-        model: str,
-    ) -> None:
+    def __init__(self, coordinator: BraviaTVCoordinator, unique_id: str) -> None:
         """Initialize the entity."""
         super().__init__(coordinator)
 
         self._attr_unique_id = unique_id
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, unique_id)},
+            connections={(CONNECTION_NETWORK_MAC, coordinator.system_info["macAddr"])},
             manufacturer=ATTR_MANUFACTURER,
-            model=model,
-            name=f"{ATTR_MANUFACTURER} {model}",
+            model_id=coordinator.system_info["model"],
+            hw_version=coordinator.system_info["generation"],
+            serial_number=coordinator.system_info["serial"],
         )
-        if coordinator.client.mac is not None:
-            self._attr_device_info["connections"] = {
-                (CONNECTION_NETWORK_MAC, coordinator.client.mac)
-            }
@@ -34,9 +34,7 @@ async def async_setup_entry(
     unique_id = config_entry.unique_id
     assert unique_id is not None
 
-    async_add_entities(
-        [BraviaTVMediaPlayer(coordinator, unique_id, config_entry.title)]
-    )
+    async_add_entities([BraviaTVMediaPlayer(coordinator, unique_id)])
 
 
 class BraviaTVMediaPlayer(BraviaTVEntity, MediaPlayerEntity):
@@ -24,7 +24,7 @@ async def async_setup_entry(
     unique_id = config_entry.unique_id
     assert unique_id is not None
 
-    async_add_entities([BraviaTVRemote(coordinator, unique_id, config_entry.title)])
+    async_add_entities([BraviaTVRemote(coordinator, unique_id)])
 
 
 class BraviaTVRemote(BraviaTVEntity, RemoteEntity):
@@ -64,6 +64,7 @@ class BroadlinkUpdateManager(ABC, Generic[_ApiT]):
             device.hass,
             _LOGGER,
             name=f"{device.name} ({device.api.model} at {device.api.host[0]})",
+            config_entry=device.config,
             update_method=self.async_update,
             update_interval=self.SCAN_INTERVAL,
         )
@@ -2,7 +2,16 @@
 
 import dataclasses
 
-from bsblan import BSBLAN, BSBLANConfig, Device, Info, StaticState
+from bsblan import (
+    BSBLAN,
+    BSBLANAuthError,
+    BSBLANConfig,
+    BSBLANConnectionError,
+    BSBLANError,
+    Device,
+    Info,
+    StaticState,
+)
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import (
@@ -13,9 +22,14 @@ from homeassistant.const import (
     Platform,
 )
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import (
+    ConfigEntryAuthFailed,
+    ConfigEntryError,
+    ConfigEntryNotReady,
+)
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 
-from .const import CONF_PASSKEY
+from .const import CONF_PASSKEY, DOMAIN
 from .coordinator import BSBLanUpdateCoordinator
 
 PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]
@@ -54,10 +68,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
     coordinator = BSBLanUpdateCoordinator(hass, entry, bsblan)
     await coordinator.async_config_entry_first_refresh()
 
-    # Fetch all required data concurrently
-    device = await bsblan.device()
-    info = await bsblan.info()
-    static = await bsblan.static_values()
+    try:
+        # Fetch all required data sequentially
+        device = await bsblan.device()
+        info = await bsblan.info()
+        static = await bsblan.static_values()
+    except BSBLANConnectionError as err:
+        raise ConfigEntryNotReady(
+            translation_domain=DOMAIN,
+            translation_key="setup_connection_error",
+            translation_placeholders={"host": entry.data[CONF_HOST]},
+        ) from err
+    except BSBLANAuthError as err:
+        raise ConfigEntryAuthFailed(
+            translation_domain=DOMAIN,
+            translation_key="setup_auth_error",
+        ) from err
+    except BSBLANError as err:
+        raise ConfigEntryError(
+            translation_domain=DOMAIN,
+            translation_key="setup_general_error",
+        ) from err
 
     entry.runtime_data = BSBLanData(
         client=bsblan,

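The three `except` branches above are the standard Home Assistant tiering for setup failures: `ConfigEntryNotReady` is retried with backoff, `ConfigEntryAuthFailed` starts a reauth flow, and `ConfigEntryError` halts setup permanently. A minimal, dependency-free sketch of how that tiering behaves; the exception classes here are stand-ins, not the real `homeassistant.exceptions` types:

```python
# Illustrative only: stand-ins mirroring Home Assistant's setup exceptions.
class NotReady(Exception):
    """Transient problem; the core retries setup with backoff."""

class AuthFailed(Exception):
    """Credentials problem; the core starts a reauth flow."""

class SetupError(Exception):
    """Permanent problem; setup is not retried."""


def setup(error_kind: str | None) -> str:
    """Mimic the bsblan setup path: translate client errors into tiers."""
    if error_kind == "connection":
        raise NotReady("device unreachable")  # retried automatically
    if error_kind == "auth":
        raise AuthFailed("bad credentials")  # user is asked to reauthenticate
    if error_kind == "other":
        raise SetupError("unexpected client error")  # shown as failed, no retry
    return "loaded"


for kind in (None, "connection", "auth", "other"):
    try:
        print(kind, "->", setup(kind))
    except (NotReady, AuthFailed, SetupError) as err:
        print(kind, "->", type(err).__name__, err)
```
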
@@ -2,9 +2,10 @@
 
 from __future__ import annotations
 
+from collections.abc import Mapping
 from typing import Any
 
-from bsblan import BSBLAN, BSBLANConfig, BSBLANError
+from bsblan import BSBLAN, BSBLANAuthError, BSBLANConfig, BSBLANError
 import voluptuous as vol
 
 from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
@@ -45,7 +46,7 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
         self.username = user_input.get(CONF_USERNAME)
         self.password = user_input.get(CONF_PASSWORD)
 
-        return await self._validate_and_create()
+        return await self._validate_and_create(user_input)
 
     async def async_step_zeroconf(
         self, discovery_info: ZeroconfServiceInfo
@@ -128,14 +129,29 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
         self.username = user_input.get(CONF_USERNAME)
         self.password = user_input.get(CONF_PASSWORD)
 
-        return await self._validate_and_create(is_discovery=True)
+        return await self._validate_and_create(user_input, is_discovery=True)
 
     async def _validate_and_create(
-        self, is_discovery: bool = False
+        self, user_input: dict[str, Any], is_discovery: bool = False
     ) -> ConfigFlowResult:
         """Validate device connection and create entry."""
         try:
-            await self._get_bsblan_info(is_discovery=is_discovery)
+            await self._get_bsblan_info()
+        except BSBLANAuthError:
+            if is_discovery:
+                return self.async_show_form(
+                    step_id="discovery_confirm",
+                    data_schema=vol.Schema(
+                        {
+                            vol.Optional(CONF_PASSKEY): str,
+                            vol.Optional(CONF_USERNAME): str,
+                            vol.Optional(CONF_PASSWORD): str,
+                        }
+                    ),
+                    errors={"base": "invalid_auth"},
+                    description_placeholders={"host": str(self.host)},
+                )
+            return self._show_setup_form({"base": "invalid_auth"}, user_input)
         except BSBLANError:
             if is_discovery:
                 return self.async_show_form(
@@ -154,18 +170,137 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
 
         return self._async_create_entry()
 
+    async def async_step_reauth(
+        self, entry_data: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Handle reauth flow."""
+        return await self.async_step_reauth_confirm()
+
+    async def async_step_reauth_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Handle reauth confirmation flow."""
+        existing_entry = self.hass.config_entries.async_get_entry(
+            self.context["entry_id"]
+        )
+        assert existing_entry
+
+        if user_input is None:
+            # Preserve existing values as defaults
+            return self.async_show_form(
+                step_id="reauth_confirm",
+                data_schema=vol.Schema(
+                    {
+                        vol.Optional(
+                            CONF_PASSKEY,
+                            default=existing_entry.data.get(
+                                CONF_PASSKEY, vol.UNDEFINED
+                            ),
+                        ): str,
+                        vol.Optional(
+                            CONF_USERNAME,
+                            default=existing_entry.data.get(
+                                CONF_USERNAME, vol.UNDEFINED
+                            ),
+                        ): str,
+                        vol.Optional(
+                            CONF_PASSWORD,
+                            default=vol.UNDEFINED,
+                        ): str,
+                    }
+                ),
+            )
+
+        # Combine existing data with the user's new input for validation.
+        # This correctly handles adding, changing, and clearing credentials.
+        config_data = existing_entry.data.copy()
+        config_data.update(user_input)
+
+        self.host = config_data[CONF_HOST]
+        self.port = config_data[CONF_PORT]
+        self.passkey = config_data.get(CONF_PASSKEY)
+        self.username = config_data.get(CONF_USERNAME)
+        self.password = config_data.get(CONF_PASSWORD)
+
+        try:
+            await self._get_bsblan_info(raise_on_progress=False, is_reauth=True)
+        except BSBLANAuthError:
+            return self.async_show_form(
+                step_id="reauth_confirm",
+                data_schema=vol.Schema(
+                    {
+                        vol.Optional(
+                            CONF_PASSKEY,
+                            default=user_input.get(CONF_PASSKEY, vol.UNDEFINED),
+                        ): str,
+                        vol.Optional(
+                            CONF_USERNAME,
+                            default=user_input.get(CONF_USERNAME, vol.UNDEFINED),
+                        ): str,
+                        vol.Optional(
+                            CONF_PASSWORD,
+                            default=vol.UNDEFINED,
+                        ): str,
+                    }
+                ),
+                errors={"base": "invalid_auth"},
+            )
+        except BSBLANError:
+            return self.async_show_form(
+                step_id="reauth_confirm",
+                data_schema=vol.Schema(
+                    {
+                        vol.Optional(
+                            CONF_PASSKEY,
+                            default=user_input.get(CONF_PASSKEY, vol.UNDEFINED),
+                        ): str,
+                        vol.Optional(
+                            CONF_USERNAME,
+                            default=user_input.get(CONF_USERNAME, vol.UNDEFINED),
+                        ): str,
+                        vol.Optional(
+                            CONF_PASSWORD,
+                            default=vol.UNDEFINED,
+                        ): str,
+                    }
+                ),
+                errors={"base": "cannot_connect"},
+            )
+
+        # Update only the fields that were provided by the user
+        return self.async_update_reload_and_abort(
+            existing_entry, data_updates=user_input, reason="reauth_successful"
+        )
+
     @callback
-    def _show_setup_form(self, errors: dict | None = None) -> ConfigFlowResult:
+    def _show_setup_form(
+        self, errors: dict | None = None, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
         """Show the setup form to the user."""
+        # Preserve user input if provided, otherwise use defaults
+        defaults = user_input or {}
+
         return self.async_show_form(
             step_id="user",
             data_schema=vol.Schema(
                 {
-                    vol.Required(CONF_HOST): str,
-                    vol.Optional(CONF_PORT, default=DEFAULT_PORT): int,
-                    vol.Optional(CONF_PASSKEY): str,
-                    vol.Optional(CONF_USERNAME): str,
-                    vol.Optional(CONF_PASSWORD): str,
+                    vol.Required(
+                        CONF_HOST, default=defaults.get(CONF_HOST, vol.UNDEFINED)
+                    ): str,
+                    vol.Optional(
+                        CONF_PORT, default=defaults.get(CONF_PORT, DEFAULT_PORT)
+                    ): int,
+                    vol.Optional(
+                        CONF_PASSKEY, default=defaults.get(CONF_PASSKEY, vol.UNDEFINED)
+                    ): str,
+                    vol.Optional(
+                        CONF_USERNAME,
+                        default=defaults.get(CONF_USERNAME, vol.UNDEFINED),
+                    ): str,
+                    vol.Optional(
+                        CONF_PASSWORD,
+                        default=defaults.get(CONF_PASSWORD, vol.UNDEFINED),
+                    ): str,
                 }
             ),
             errors=errors or {},
@@ -186,7 +321,9 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
         )
 
     async def _get_bsblan_info(
-        self, raise_on_progress: bool = True, is_discovery: bool = False
+        self,
+        raise_on_progress: bool = True,
+        is_reauth: bool = False,
     ) -> None:
         """Get device information from a BSBLAN device."""
         config = BSBLANConfig(
@@ -209,11 +346,13 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN):
             format_mac(self.mac), raise_on_progress=raise_on_progress
         )
 
-        # Always allow updating host/port for both user and discovery flows
-        # This ensures connectivity is maintained when devices change IP addresses
-        self._abort_if_unique_id_configured(
-            updates={
-                CONF_HOST: self.host,
-                CONF_PORT: self.port,
-            }
-        )
+        # Skip unique_id configuration check during reauth to prevent "already_configured" abort
+        if not is_reauth:
+            # Always allow updating host/port for both user and discovery flows
+            # This ensures connectivity is maintained when devices change IP addresses
+            self._abort_if_unique_id_configured(
+                updates={
+                    CONF_HOST: self.host,
+                    CONF_PORT: self.port,
+                }
+            )

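A detail worth calling out in the reauth form above: defaults fall back to `vol.UNDEFINED`, which voluptuous treats as "no default at all", so keys absent from the stored entry stay absent instead of becoming empty strings. A small runnable sketch of that pattern; the `stored` dict here is a hypothetical existing entry, not real data:

```python
import voluptuous as vol

# Sketch of the pre-fill pattern: a key defaults to the stored value when
# one exists, and to vol.UNDEFINED (meaning no default at all) otherwise.
stored = {"username": "admin"}  # hypothetical existing config entry data

schema = vol.Schema(
    {
        vol.Optional("passkey", default=stored.get("passkey", vol.UNDEFINED)): str,
        vol.Optional("username", default=stored.get("username", vol.UNDEFINED)): str,
        vol.Optional("password", default=vol.UNDEFINED): str,  # never pre-filled
    }
)

print(schema({}))  # {'username': 'admin'} - only keys with stored values appear
print(schema({"password": "s3cret"}))  # username default plus supplied password
```
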
@@ -4,11 +4,19 @@ from dataclasses import dataclass
 from datetime import timedelta
 from random import randint
 
-from bsblan import BSBLAN, BSBLANConnectionError, HotWaterState, Sensor, State
+from bsblan import (
+    BSBLAN,
+    BSBLANAuthError,
+    BSBLANConnectionError,
+    HotWaterState,
+    Sensor,
+    State,
+)
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_HOST
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
 
 from .const import DOMAIN, LOGGER, SCAN_INTERVAL
@@ -62,6 +70,10 @@ class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]):
             state = await self.client.state()
             sensor = await self.client.sensor()
             dhw = await self.client.hot_water_state()
+        except BSBLANAuthError as err:
+            raise ConfigEntryAuthFailed(
+                "Authentication failed for BSB-Lan device"
+            ) from err
         except BSBLANConnectionError as err:
             host = self.config_entry.data[CONF_HOST] if self.config_entry else "unknown"
             raise UpdateFailed(

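The coordinator hunk separates two failure modes: a connection error marks one failed poll (`UpdateFailed`), while an auth error escalates to reauthentication (`ConfigEntryAuthFailed`). A self-contained sketch of that split, using stand-in exception classes and a fake client rather than the real Home Assistant and bsblan types:

```python
import asyncio

# Stand-ins: UpdateFailed marks one bad poll and the coordinator keeps
# polling; AuthFailed (ConfigEntryAuthFailed in Home Assistant) instead
# triggers a reauthentication flow.
class UpdateFailed(Exception): ...
class AuthFailed(Exception): ...


class FakeClient:
    def __init__(self, mode: str) -> None:
        self.mode = mode

    async def state(self) -> dict:
        if self.mode == "auth":
            raise PermissionError("401")
        if self.mode == "down":
            raise ConnectionError("timeout")
        return {"temp": 21.5}


async def poll(client: FakeClient) -> dict:
    """Mimic _async_update_data: map client errors onto the two tiers."""
    try:
        return await client.state()
    except PermissionError as err:
        raise AuthFailed("Authentication failed for BSB-Lan device") from err
    except ConnectionError as err:
        raise UpdateFailed(f"connection error: {err}") from err


for mode in ("ok", "down", "auth"):
    try:
        print(mode, asyncio.run(poll(FakeClient(mode))))
    except (UpdateFailed, AuthFailed) as err:
        print(mode, type(err).__name__, err)
```
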
@@ -33,14 +33,30 @@
           "username": "[%key:component::bsblan::config::step::user::data_description::username%]",
           "password": "[%key:component::bsblan::config::step::user::data_description::password%]"
         }
+      },
+      "reauth_confirm": {
+        "title": "[%key:common::config_flow::title::reauth%]",
+        "description": "The BSB-Lan integration needs to re-authenticate with {name}",
+        "data": {
+          "passkey": "[%key:component::bsblan::config::step::user::data::passkey%]",
+          "username": "[%key:common::config_flow::data::username%]",
+          "password": "[%key:common::config_flow::data::password%]"
+        },
+        "data_description": {
+          "passkey": "[%key:component::bsblan::config::step::user::data_description::passkey%]",
+          "username": "[%key:component::bsblan::config::step::user::data_description::username%]",
+          "password": "[%key:component::bsblan::config::step::user::data_description::password%]"
+        }
       }
     },
     "error": {
-      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
+      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
+      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
     },
     "abort": {
       "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
-      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]"
+      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     }
   },
   "exceptions": {
@@ -55,6 +71,15 @@
     },
     "set_operation_mode_error": {
       "message": "An error occurred while setting the operation mode"
+    },
+    "setup_connection_error": {
+      "message": "Failed to retrieve static device data from BSB-Lan device at {host}"
+    },
+    "setup_auth_error": {
+      "message": "Authentication failed while retrieving static device data"
+    },
+    "setup_general_error": {
+      "message": "An unknown error occurred while retrieving static device data"
     }
   },
   "entity": {

@@ -25,7 +25,7 @@
   "services": {
     "press": {
       "name": "Press",
-      "description": "Press the button entity."
+      "description": "Presses a button entity."
     }
   }
 }

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/caldav",
   "iot_class": "cloud_polling",
   "loggers": ["caldav", "vobject"],
-  "requirements": ["caldav==1.6.0", "icalendar==6.1.0"]
+  "requirements": ["caldav==1.6.0", "icalendar==6.3.1"]
 }

@@ -255,7 +255,7 @@ class ClimateEntity(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
     )
 
     entity_description: ClimateEntityDescription
-    _attr_current_humidity: int | None = None
+    _attr_current_humidity: float | None = None
     _attr_current_temperature: float | None = None
     _attr_fan_mode: str | None
     _attr_fan_modes: list[str] | None

@@ -100,16 +100,10 @@ set_hvac_mode:
   fields:
     hvac_mode:
       selector:
-        select:
-          options:
-            - "off"
-            - "auto"
-            - "cool"
-            - "dry"
-            - "fan_only"
-            - "heat_cool"
-            - "heat"
-          translation_key: hvac_mode
+        state:
+          hide_states:
+            - unavailable
+            - unknown
 set_swing_mode:
   target:
     entity:

@@ -6,12 +6,16 @@ import asyncio
 from collections.abc import Callable
 from contextlib import suppress
 from datetime import datetime, timedelta
-from http import HTTPStatus
 import logging
 from typing import TYPE_CHECKING, Any
 
 import aiohttp
-from hass_nabucasa import Cloud, cloud_api
+from hass_nabucasa import AlexaApiError, Cloud
+from hass_nabucasa.alexa_api import (
+    AlexaAccessTokenDetails,
+    AlexaApiNeedsRelinkError,
+    AlexaApiNoTokenError,
+)
 from yarl import URL
 
 from homeassistant.components import persistent_notification
@@ -146,7 +150,7 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
         self._cloud_user = cloud_user
         self._prefs = prefs
         self._cloud = cloud
-        self._token = None
+        self._token: str | None = None
         self._token_valid: datetime | None = None
         self._cur_entity_prefs = async_get_assistant_settings(hass, CLOUD_ALEXA)
         self._alexa_sync_unsub: Callable[[], None] | None = None
@@ -318,32 +322,31 @@ class CloudAlexaConfig(alexa_config.AbstractConfig):
 
     async def async_get_access_token(self) -> str | None:
         """Get an access token."""
+        details: AlexaAccessTokenDetails | None
         if self._token_valid is not None and self._token_valid > utcnow():
             return self._token
 
-        resp = await cloud_api.async_alexa_access_token(self._cloud)
-        body = await resp.json()
-
-        if resp.status == HTTPStatus.BAD_REQUEST:
-            if body["reason"] in ("RefreshTokenNotFound", "UnknownRegion"):
-                if self.should_report_state:
-                    persistent_notification.async_create(
-                        self.hass,
-                        (
-                            "There was an error reporting state to Alexa"
-                            f" ({body['reason']}). Please re-link your Alexa skill via"
-                            " the Alexa app to continue using it."
-                        ),
-                        "Alexa state reporting disabled",
-                        "cloud_alexa_report",
-                    )
-                raise alexa_errors.RequireRelink
-
-            raise alexa_errors.NoTokenAvailable
-
-        self._token = body["access_token"]
-        self._endpoint = body["event_endpoint"]
-        self._token_valid = utcnow() + timedelta(seconds=body["expires_in"])
+        try:
+            details = await self._cloud.alexa_api.access_token()
+        except AlexaApiNeedsRelinkError as exception:
+            if self.should_report_state:
+                persistent_notification.async_create(
+                    self.hass,
+                    (
+                        "There was an error reporting state to Alexa"
+                        f" ({exception.reason}). Please re-link your Alexa skill via"
+                        " the Alexa app to continue using it."
+                    ),
+                    "Alexa state reporting disabled",
+                    "cloud_alexa_report",
+                )
+            raise alexa_errors.RequireRelink from exception
+        except (AlexaApiNoTokenError, AlexaApiError) as exception:
+            raise alexa_errors.NoTokenAvailable from exception
+
+        self._token = details["access_token"]
+        self._endpoint = details["event_endpoint"]
+        self._token_valid = utcnow() + timedelta(seconds=details["expires_in"])
         return self._token
 
     async def _async_prefs_updated(self, prefs: CloudPreferences) -> None:

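The rewritten `async_get_access_token` keeps its cache-until-expiry shape while swapping the raw HTTP call for the typed `alexa_api.access_token()` helper. A reduced, runnable sketch of that caching pattern; the `fetch` callable stands in for the hass_nabucasa call, and the field names mirror the hunk:

```python
import asyncio
from datetime import datetime, timedelta


class TokenCache:
    """Sketch of the cache-until-expiry pattern from the hunk above.

    fetch() is a stand-in for cloud.alexa_api.access_token(), assumed to
    return a mapping with access_token and expires_in.
    """

    def __init__(self, fetch) -> None:
        self._fetch = fetch
        self._token: str | None = None
        self._valid_until: datetime | None = None

    async def get(self) -> str | None:
        if self._valid_until is not None and self._valid_until > datetime.now():
            return self._token  # still fresh: no network round-trip
        details = await self._fetch()
        self._token = details["access_token"]
        self._valid_until = datetime.now() + timedelta(seconds=details["expires_in"])
        return self._token


async def main() -> None:
    calls = 0

    async def fetch() -> dict:
        nonlocal calls
        calls += 1
        return {"access_token": f"tok-{calls}", "expires_in": 3600}

    cache = TokenCache(fetch)
    print(await cache.get(), await cache.get())  # tok-1 tok-1: second hit is cached
    print("fetches:", calls)  # 1


asyncio.run(main())
```
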
@@ -7,7 +7,7 @@ from http import HTTPStatus
 import logging
 from typing import TYPE_CHECKING, Any
 
-from hass_nabucasa import Cloud, cloud_api
+from hass_nabucasa import Cloud
 from hass_nabucasa.google_report_state import ErrorResponse
 
 from homeassistant.components.binary_sensor import BinarySensorDeviceClass
@@ -377,7 +377,7 @@ class CloudGoogleConfig(AbstractConfig):
             return HTTPStatus.OK
 
         async with self._sync_entities_lock:
-            resp = await cloud_api.async_google_actions_request_sync(self._cloud)
+            resp = await self._cloud.google_report_state.request_sync()
             return resp.status
 
     async def async_connect_agent_user(self, agent_user_id: str) -> None:

@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==0.110.0"],
+  "requirements": ["hass-nabucasa==1.0.0"],
   "single_config_entry": true
 }

@@ -4,11 +4,13 @@ from __future__ import annotations
 
 import asyncio
 import logging
-from typing import Any
 
-from aiohttp.client_exceptions import ClientError
-from hass_nabucasa import Cloud, cloud_api
-from hass_nabucasa.payments_api import PaymentsApiError, SubscriptionInfo
+from hass_nabucasa import (
+    Cloud,
+    MigratePaypalAgreementInfo,
+    PaymentsApiError,
+    SubscriptionInfo,
+)
 
 from .client import CloudClient
 from .const import REQUEST_TIMEOUT
@@ -29,17 +31,17 @@ async def async_subscription_info(cloud: Cloud[CloudClient]) -> SubscriptionInfo
 
 async def async_migrate_paypal_agreement(
     cloud: Cloud[CloudClient],
-) -> dict[str, Any] | None:
+) -> MigratePaypalAgreementInfo | None:
     """Migrate a paypal agreement from legacy."""
     try:
         async with asyncio.timeout(REQUEST_TIMEOUT):
-            return await cloud_api.async_migrate_paypal_agreement(cloud)
+            return await cloud.payments.migrate_paypal_agreement()
     except TimeoutError:
         _LOGGER.error(
             "A timeout of %s was reached while trying to start agreement migration",
             REQUEST_TIMEOUT,
         )
-    except ClientError as exception:
+    except PaymentsApiError as exception:
         _LOGGER.error("Failed to start agreement migration - %s", exception)
 
     return None

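The migration helper's shape is unchanged by the hunk: bound the call with `asyncio.timeout` (Python 3.11+), log and swallow failures, and fall through to `None`. A runnable sketch of that shape; `PaymentsApiError` is a stand-in class here, and the timeout constant is assumed:

```python
import asyncio

REQUEST_TIMEOUT = 10  # assumption: the real constant lives in the integration's const module


class PaymentsApiError(Exception):
    """Stand-in for hass_nabucasa's PaymentsApiError."""


async def migrate(call) -> dict | None:
    """Same shape as async_migrate_paypal_agreement: timeout-bounded,
    errors reported (printed here instead of logged) and turned into None."""
    try:
        async with asyncio.timeout(REQUEST_TIMEOUT):
            return await call()
    except TimeoutError:
        print(f"timed out after {REQUEST_TIMEOUT}s")
    except PaymentsApiError as exc:
        print("migration failed -", exc)
    return None


async def ok() -> dict:
    return {"url": "https://example.invalid/approve"}


async def boom() -> dict:
    raise PaymentsApiError("legacy agreement not found")


print(asyncio.run(migrate(ok)))    # the result dict
print(asyncio.run(migrate(boom)))  # prints the error, then None
```
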
@@ -7,22 +7,18 @@ import logging
 
 from coinbase.rest import RESTClient
 from coinbase.rest.rest_base import HTTPError
-from coinbase.wallet.client import Client as LegacyClient
-from coinbase.wallet.error import AuthenticationError
 
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, Platform
 from homeassistant.core import HomeAssistant
-from homeassistant.helpers import entity_registry as er
+from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.util import Throttle
 
 from .const import (
     ACCOUNT_IS_VAULT,
     API_ACCOUNT_AMOUNT,
     API_ACCOUNT_AVALIABLE,
-    API_ACCOUNT_BALANCE,
     API_ACCOUNT_CURRENCY,
-    API_ACCOUNT_CURRENCY_CODE,
     API_ACCOUNT_HOLD,
     API_ACCOUNT_ID,
     API_ACCOUNT_NAME,
@@ -31,12 +27,9 @@ from .const import (
     API_DATA,
     API_RATES_CURRENCY,
     API_RESOURCE_TYPE,
-    API_TYPE_VAULT,
     API_V3_ACCOUNT_ID,
     API_V3_TYPE_VAULT,
-    CONF_CURRENCIES,
     CONF_EXCHANGE_BASE,
-    CONF_EXCHANGE_RATES,
 )
 
 _LOGGER = logging.getLogger(__name__)
@@ -51,9 +44,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) ->
     """Set up Coinbase from a config entry."""
 
     instance = await hass.async_add_executor_job(create_and_update_instance, entry)
-
-    entry.async_on_unload(entry.add_update_listener(update_listener))
 
     entry.runtime_data = instance
 
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -68,68 +58,28 @@ async def async_unload_entry(hass: HomeAssistant, entry: CoinbaseConfigEntry) ->
 
 def create_and_update_instance(entry: CoinbaseConfigEntry) -> CoinbaseData:
     """Create and update a Coinbase Data instance."""
+    # Check if user is using deprecated v2 API credentials
     if "organizations" not in entry.data[CONF_API_KEY]:
-        client = LegacyClient(entry.data[CONF_API_KEY], entry.data[CONF_API_TOKEN])
-        version = "v2"
-    else:
-        client = RESTClient(
-            api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
-        )
-        version = "v3"
+        # Trigger reauthentication to ask user for v3 credentials
+        raise ConfigEntryAuthFailed(
+            "Your Coinbase API key appears to be for the deprecated v2 API. "
+            "Please reconfigure with a new API key created for the v3 API. "
+            "Visit https://www.coinbase.com/developer-platform to create new credentials."
+        )
+
+    client = RESTClient(
+        api_key=entry.data[CONF_API_KEY], api_secret=entry.data[CONF_API_TOKEN]
+    )
     base_rate = entry.options.get(CONF_EXCHANGE_BASE, "USD")
-    instance = CoinbaseData(client, base_rate, version)
+    instance = CoinbaseData(client, base_rate)
     instance.update()
     return instance
 
 
-async def update_listener(
-    hass: HomeAssistant, config_entry: CoinbaseConfigEntry
-) -> None:
-    """Handle options update."""
-
-    await hass.config_entries.async_reload(config_entry.entry_id)
-
-    registry = er.async_get(hass)
-    entities = er.async_entries_for_config_entry(registry, config_entry.entry_id)
-
-    # Remove orphaned entities
-    for entity in entities:
-        currency = entity.unique_id.split("-")[-1]
-        if (
-            "xe" in entity.unique_id
-            and currency not in config_entry.options.get(CONF_EXCHANGE_RATES, [])
-        ) or (
-            "wallet" in entity.unique_id
-            and currency not in config_entry.options.get(CONF_CURRENCIES, [])
-        ):
-            registry.async_remove(entity.entity_id)
-
-
-def get_accounts(client, version):
+def get_accounts(client):
     """Handle paginated accounts."""
     response = client.get_accounts()
-    if version == "v2":
-        accounts = response[API_DATA]
-        next_starting_after = response.pagination.next_starting_after
-
-        while next_starting_after:
-            response = client.get_accounts(starting_after=next_starting_after)
-            accounts += response[API_DATA]
-            next_starting_after = response.pagination.next_starting_after
-
-        return [
-            {
-                API_ACCOUNT_ID: account[API_ACCOUNT_ID],
-                API_ACCOUNT_NAME: account[API_ACCOUNT_NAME],
-                API_ACCOUNT_CURRENCY: account[API_ACCOUNT_CURRENCY][
-                    API_ACCOUNT_CURRENCY_CODE
-                ],
-                API_ACCOUNT_AMOUNT: account[API_ACCOUNT_BALANCE][API_ACCOUNT_AMOUNT],
-                ACCOUNT_IS_VAULT: account[API_RESOURCE_TYPE] == API_TYPE_VAULT,
-            }
-            for account in accounts
-        ]
-
     accounts = response[API_ACCOUNTS]
     while response["has_next"]:
         response = client.get_accounts(cursor=response["cursor"])
@@ -153,37 +103,28 @@ def get_accounts(client, version):
 class CoinbaseData:
     """Get the latest data and update the states."""
 
-    def __init__(self, client, exchange_base, version):
+    def __init__(self, client, exchange_base):
         """Init the coinbase data object."""
         self.client = client
         self.accounts = None
         self.exchange_base = exchange_base
         self.exchange_rates = None
-        if version == "v2":
-            self.user_id = self.client.get_current_user()[API_ACCOUNT_ID]
-        else:
-            self.user_id = (
-                "v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
-            )
-        self.api_version = version
+        self.user_id = (
+            "v3_" + client.get_portfolios()["portfolios"][0][API_V3_ACCOUNT_ID]
+        )
 
     @Throttle(MIN_TIME_BETWEEN_UPDATES)
     def update(self):
         """Get the latest data from coinbase."""
         try:
-            self.accounts = get_accounts(self.client, self.api_version)
-            if self.api_version == "v2":
-                self.exchange_rates = self.client.get_exchange_rates(
-                    currency=self.exchange_base
-                )
-            else:
-                self.exchange_rates = self.client.get(
-                    "/v2/exchange-rates",
-                    params={API_RATES_CURRENCY: self.exchange_base},
-                )[API_DATA]
-        except (AuthenticationError, HTTPError) as coinbase_error:
+            self.accounts = get_accounts(self.client)
+            self.exchange_rates = self.client.get(
+                "/v2/exchange-rates",
+                params={API_RATES_CURRENCY: self.exchange_base},
+            )[API_DATA]
+        except HTTPError as coinbase_error:
             _LOGGER.error(
                 "Authentication error connecting to coinbase: %s", coinbase_error
             )

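With the v2 branch gone, `get_accounts` is just the v3 cursor loop: follow `cursor` while the response reports `has_next`. The same loop against a fake client; the page shapes below are assumptions based on the fields the diff actually reads:

```python
# Sketch of the surviving v3 pagination loop. FakeClient mimics the shape
# of the RESTClient responses used in the diff (accounts, has_next, cursor).
class FakeClient:
    PAGES = {
        None: {"accounts": [{"id": "a"}], "has_next": True, "cursor": "c1"},
        "c1": {"accounts": [{"id": "b"}], "has_next": False, "cursor": ""},
    }

    def get_accounts(self, cursor: str | None = None) -> dict:
        return self.PAGES[cursor]


def get_accounts(client: FakeClient) -> list[dict]:
    response = client.get_accounts()
    accounts = response["accounts"]
    while response["has_next"]:  # keep following the cursor chain
        response = client.get_accounts(cursor=response["cursor"])
        accounts += response["accounts"]
    return accounts


print(get_accounts(FakeClient()))  # [{'id': 'a'}, {'id': 'b'}]
```
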
@@ -2,17 +2,20 @@
 
 from __future__ import annotations
 
+from collections.abc import Mapping
 import logging
 from typing import Any
 
 from coinbase.rest import RESTClient
 from coinbase.rest.rest_base import HTTPError
-from coinbase.wallet.client import Client as LegacyClient
-from coinbase.wallet.error import AuthenticationError
 import voluptuous as vol
 
-from homeassistant.config_entries import ConfigFlow, ConfigFlowResult, OptionsFlow
-from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION
+from homeassistant.config_entries import (
+    ConfigFlow,
+    ConfigFlowResult,
+    OptionsFlowWithReload,
+)
+from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.helpers import config_validation as cv
@@ -45,9 +48,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
 
 def get_user_from_client(api_key, api_token):
     """Get the user name from Coinbase API credentials."""
-    if "organizations" not in api_key:
-        client = LegacyClient(api_key, api_token)
-        return client.get_current_user()["name"]
     client = RESTClient(api_key=api_key, api_secret=api_token)
     return client.get_portfolios()["portfolios"][0]["name"]
 
@@ -59,7 +59,7 @@ async def validate_api(hass: HomeAssistant, data):
         user = await hass.async_add_executor_job(
             get_user_from_client, data[CONF_API_KEY], data[CONF_API_TOKEN]
         )
-    except (AuthenticationError, HTTPError) as error:
+    except HTTPError as error:
         if "api key" in str(error) or " 401 Client Error" in str(error):
             _LOGGER.debug("Coinbase rejected API credentials due to an invalid API key")
             raise InvalidKey from error
@@ -74,8 +74,8 @@ async def validate_api(hass: HomeAssistant, data):
         raise InvalidAuth from error
     except ConnectionError as error:
         raise CannotConnect from error
-    api_version = "v3" if "organizations" in data[CONF_API_KEY] else "v2"
-    return {"title": user, "api_version": api_version}
+
+    return {"title": user}
 
 
 async def validate_options(
@@ -85,20 +85,17 @@ async def validate_options(
 
     client = config_entry.runtime_data.client
 
-    accounts = await hass.async_add_executor_job(
-        get_accounts, client, config_entry.data.get("api_version", "v2")
-    )
+    accounts = await hass.async_add_executor_job(get_accounts, client)
 
     accounts_currencies = [
         account[API_ACCOUNT_CURRENCY]
         for account in accounts
         if not account[ACCOUNT_IS_VAULT]
     ]
-    if config_entry.data.get("api_version", "v2") == "v2":
-        available_rates = await hass.async_add_executor_job(client.get_exchange_rates)
-    else:
-        resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
-        available_rates = resp[API_DATA]
+    resp = await hass.async_add_executor_job(client.get, "/v2/exchange-rates")
+    available_rates = resp[API_DATA]
 
     if CONF_CURRENCIES in options:
         for currency in options[CONF_CURRENCIES]:
             if currency not in accounts_currencies:
@@ -117,6 +114,8 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
 
     VERSION = 1
 
+    reauth_entry: CoinbaseConfigEntry
+
     async def async_step_user(
         self, user_input: dict[str, str] | None = None
     ) -> ConfigFlowResult:
@@ -143,12 +142,63 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
             _LOGGER.exception("Unexpected exception")
             errors["base"] = "unknown"
         else:
-            user_input[CONF_API_VERSION] = info["api_version"]
             return self.async_create_entry(title=info["title"], data=user_input)
         return self.async_show_form(
             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
         )
 
+    async def async_step_reauth(
+        self, entry_data: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Handle reauthentication flow."""
+        self.reauth_entry = self._get_reauth_entry()
+        return await self.async_step_reauth_confirm()
+
+    async def async_step_reauth_confirm(
+        self, user_input: dict[str, str] | None = None
+    ) -> ConfigFlowResult:
+        """Handle reauthentication confirmation."""
+        errors: dict[str, str] = {}
+
+        if user_input is None:
+            return self.async_show_form(
+                step_id="reauth_confirm",
+                data_schema=STEP_USER_DATA_SCHEMA,
+                description_placeholders={
+                    "account_name": self.reauth_entry.title,
+                },
+                errors=errors,
+            )
+
+        try:
+            await validate_api(self.hass, user_input)
+        except CannotConnect:
+            errors["base"] = "cannot_connect"
+        except InvalidKey:
+            errors["base"] = "invalid_auth_key"
+        except InvalidSecret:
+            errors["base"] = "invalid_auth_secret"
+        except InvalidAuth:
+            errors["base"] = "invalid_auth"
+        except Exception:
+            _LOGGER.exception("Unexpected exception")
+            errors["base"] = "unknown"
+        else:
+            return self.async_update_reload_and_abort(
+                self.reauth_entry,
+                data_updates=user_input,
+                reason="reauth_successful",
+            )
+
+        return self.async_show_form(
+            step_id="reauth_confirm",
+            data_schema=STEP_USER_DATA_SCHEMA,
+            description_placeholders={
+                "account_name": self.reauth_entry.title,
+            },
+            errors=errors,
+        )
+
     @staticmethod
     @callback
     def async_get_options_flow(
@@ -158,7 +208,7 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
         return OptionsFlowHandler()
 
 
-class OptionsFlowHandler(OptionsFlow):
+class OptionsFlowHandler(OptionsFlowWithReload):
     """Handle a option flow for Coinbase."""
 
     async def async_step_init(

@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/coinbase",
   "iot_class": "cloud_polling",
   "loggers": ["coinbase"],
-  "requirements": ["coinbase==2.1.0", "coinbase-advanced-py==1.2.2"]
+  "requirements": ["coinbase-advanced-py==1.2.2"]
 }

@@ -6,6 +6,7 @@ import logging
 
 from homeassistant.components.sensor import SensorEntity, SensorStateClass
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
 from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
 from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
 
@@ -27,7 +28,6 @@ from .const import (
 _LOGGER = logging.getLogger(__name__)
 
 ATTR_NATIVE_BALANCE = "Balance in native currency"
-ATTR_API_VERSION = "API Version"
 
 CURRENCY_ICONS = {
     "BTC": "mdi:currency-btc",
@@ -69,11 +69,26 @@ async def async_setup_entry(
         CONF_EXCHANGE_PRECISION, CONF_EXCHANGE_PRECISION_DEFAULT
     )
 
+    # Remove orphaned entities
+    registry = er.async_get(hass)
+    existing_entities = er.async_entries_for_config_entry(
+        registry, config_entry.entry_id
+    )
+    for entity in existing_entities:
+        currency = entity.unique_id.split("-")[-1]
+        if (
+            "xe" in entity.unique_id
+            and currency not in config_entry.options.get(CONF_EXCHANGE_RATES, [])
+        ) or (
+            "wallet" in entity.unique_id
+            and currency not in config_entry.options.get(CONF_CURRENCIES, [])
+        ):
+            registry.async_remove(entity.entity_id)
+
     for currency in desired_currencies:
         _LOGGER.debug(
-            "Attempting to set up %s account sensor with %s API",
+            "Attempting to set up %s account sensor",
             currency,
-            instance.api_version,
         )
         if currency not in provided_currencies:
             _LOGGER.warning(
@@ -89,9 +104,8 @@ async def async_setup_entry(
     if CONF_EXCHANGE_RATES in config_entry.options:
         for rate in config_entry.options[CONF_EXCHANGE_RATES]:
             _LOGGER.debug(
-                "Attempting to set up %s account sensor with %s API",
+                "Attempting to set up %s exchange rate sensor",
                 rate,
-                instance.api_version,
             )
             entities.append(
                 ExchangeRateSensor(
@@ -146,15 +160,13 @@ class AccountSensor(SensorEntity):
         """Return the state attributes of the sensor."""
         return {
             ATTR_NATIVE_BALANCE: f"{self._native_balance} {self._coinbase_data.exchange_base}",
-            ATTR_API_VERSION: self._coinbase_data.api_version,
         }
 
     def update(self) -> None:
        """Get the latest state of the sensor."""
         _LOGGER.debug(
-            "Updating %s account sensor with %s API",
+            "Updating %s account sensor",
             self._currency,
-            self._coinbase_data.api_version,
         )
         self._coinbase_data.update()
         for account in self._coinbase_data.accounts:
@@ -210,9 +222,8 @@ class ExchangeRateSensor(SensorEntity):
     def update(self) -> None:
         """Get the latest state of the sensor."""
         _LOGGER.debug(
-            "Updating %s rate sensor with %s API",
+            "Updating %s rate sensor",
             self._currency,
-            self._coinbase_data.api_version,
         )
         self._coinbase_data.update()
         self._attr_native_value = round(

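The orphan cleanup moved from the old options-update listener into sensor setup, but the decision itself is pure string-and-options logic, so it can be isolated and exercised directly. In the sketch below the unique-id shapes and option key names are inferred from the `split("-")[-1]` and `"xe"`/`"wallet"` checks in the hunk and are illustrative only:

```python
# The orphan test isolated as a pure function: an entity is stale when its
# unique_id names a currency that the (hypothetical) option lists no longer
# include. Option keys here are stand-ins for CONF_EXCHANGE_RATES /
# CONF_CURRENCIES in the integration's const module.
def is_orphaned(unique_id: str, options: dict) -> bool:
    currency = unique_id.split("-")[-1]
    if "xe" in unique_id:
        return currency not in options.get("exchange_rates", [])
    if "wallet" in unique_id:
        return currency not in options.get("currencies", [])
    return False


opts = {"currencies": ["BTC"], "exchange_rates": ["EUR"]}
print(is_orphaned("abc-wallet-BTC", opts))  # False - still configured
print(is_orphaned("abc-wallet-ETH", opts))  # True - removed from options
print(is_orphaned("abc-xe-EUR", opts))      # False
```
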
@@ -8,6 +8,14 @@
         "api_key": "[%key:common::config_flow::data::api_key%]",
         "api_token": "API secret"
       }
+    },
+    "reauth_confirm": {
+      "title": "Update Coinbase API credentials",
+      "description": "Your current Coinbase API key appears to be for the deprecated v2 API. Please reconfigure with a new API key created for the v3 API. Visit https://www.coinbase.com/developer-platform to create new credentials for {account_name}.",
+      "data": {
+        "api_key": "[%key:common::config_flow::data::api_key%]",
+        "api_token": "API secret"
+      }
     }
   },
   "error": {
@@ -18,7 +26,8 @@
     "unknown": "[%key:common::config_flow::error::unknown%]"
   },
   "abort": {
-    "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+    "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+    "reauth_successful": "Successfully updated credentials"
   }
 },
 "options": {

@@ -40,6 +40,7 @@ from .chat_log import (
     ConverseError,
     SystemContent,
     ToolResultContent,
+    ToolResultContentDeltaDict,
     UserContent,
     async_get_chat_log,
 )
@@ -79,6 +80,7 @@ __all__ = [
     "ConverseError",
     "SystemContent",
     "ToolResultContent",
+    "ToolResultContentDeltaDict",
     "UserContent",
     "async_conversation_trace_append",
     "async_converse",
@@ -117,7 +119,7 @@ CONFIG_SCHEMA = vol.Schema(
                     {cv.string: vol.All(cv.ensure_list, [cv.string])}
                 )
             }
-        )
+        ),
     },
     extra=vol.ALLOW_EXTRA,
 )
@@ -268,8 +270,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
     entity_component = EntityComponent[ConversationEntity](_LOGGER, DOMAIN, hass)
     hass.data[DATA_COMPONENT] = entity_component
 
+    agent_config = config.get(DOMAIN, {})
     await async_setup_default_agent(
-        hass, entity_component, config.get(DOMAIN, {}).get("intents", {})
+        hass, entity_component, config_intents=agent_config.get("intents", {})
     )
 
     async def handle_process(service: ServiceCall) -> ServiceResponse:

@@ -9,7 +9,7 @@ from contextvars import ContextVar
|
|||||||
from dataclasses import asdict, dataclass, field, replace
|
from dataclasses import asdict, dataclass, field, replace
|
||||||
import logging
|
import logging
|
||||||
from pathlib import Path
|
from pathlib import Path
|
||||||
from typing import Any, Literal, TypedDict
|
from typing import Any, Literal, TypedDict, cast
|
||||||
|
|
||||||
import voluptuous as vol
|
import voluptuous as vol
|
||||||
|
|
||||||
@@ -161,7 +161,9 @@ class AssistantContent:
|
|||||||
role: Literal["assistant"] = field(init=False, default="assistant")
|
role: Literal["assistant"] = field(init=False, default="assistant")
|
||||||
agent_id: str
|
agent_id: str
|
||||||
content: str | None = None
|
content: str | None = None
|
||||||
|
thinking_content: str | None = None
|
||||||
tool_calls: list[llm.ToolInput] | None = None
|
tool_calls: list[llm.ToolInput] | None = None
|
||||||
|
native: Any = None
|
||||||
|
|
||||||
|
|
||||||
@dataclass(frozen=True)
|
@dataclass(frozen=True)
|
||||||
@@ -183,7 +185,18 @@ class AssistantContentDeltaDict(TypedDict, total=False):
|
|||||||
|
|
||||||
role: Literal["assistant"]
|
role: Literal["assistant"]
|
||||||
content: str | None
|
content: str | None
|
||||||
|
thinking_content: str | None
|
||||||
tool_calls: list[llm.ToolInput] | None
|
tool_calls: list[llm.ToolInput] | None
|
||||||
|
native: Any
|
||||||
|
|
||||||
|
|
||||||
|
class ToolResultContentDeltaDict(TypedDict, total=False):
|
||||||
|
"""Tool result content."""
|
||||||
|
|
||||||
|
role: Literal["tool_result"]
|
||||||
|
tool_call_id: str
|
||||||
|
tool_name: str
|
||||||
|
tool_result: JsonObjectType
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
@dataclass
|
||||||
@@ -231,17 +244,25 @@ class ChatLog:
|
|||||||
|
|
||||||
@callback
|
@callback
|
||||||
def async_add_assistant_content_without_tools(
|
def async_add_assistant_content_without_tools(
|
||||||
self, content: AssistantContent
|
self, content: AssistantContent | ToolResultContent
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Add assistant content to the log."""
|
"""Add assistant content to the log.
|
||||||
|
|
||||||
|
Allows assistant content without tool calls or with external tool calls only,
|
||||||
|
as well as tool results for the external tools.
|
||||||
|
"""
|
||||||
LOGGER.debug("Adding assistant content: %s", content)
|
LOGGER.debug("Adding assistant content: %s", content)
|
||||||
if content.tool_calls is not None:
|
if (
|
||||||
raise ValueError("Tool calls not allowed")
|
isinstance(content, AssistantContent)
|
||||||
|
and content.tool_calls is not None
|
||||||
|
and any(not tool_call.external for tool_call in content.tool_calls)
|
||||||
|
):
|
||||||
|
raise ValueError("Non-external tool calls not allowed")
|
||||||
self.content.append(content)
|
self.content.append(content)
|
||||||
|
|
||||||
async def async_add_assistant_content(
|
async def async_add_assistant_content(
|
||||||
self,
|
self,
|
||||||
content: AssistantContent,
|
content: AssistantContent | ToolResultContent,
|
||||||
/,
|
/,
|
||||||
tool_call_tasks: dict[str, asyncio.Task] | None = None,
|
tool_call_tasks: dict[str, asyncio.Task] | None = None,
|
||||||
) -> AsyncGenerator[ToolResultContent]:
|
) -> AsyncGenerator[ToolResultContent]:
|
||||||
@@ -254,7 +275,11 @@ class ChatLog:
|
|||||||
LOGGER.debug("Adding assistant content: %s", content)
|
LOGGER.debug("Adding assistant content: %s", content)
|
||||||
self.content.append(content)
|
self.content.append(content)
|
||||||
|
|
||||||
if content.tool_calls is None:
|
if (
|
||||||
|
not isinstance(content, AssistantContent)
|
||||||
|
or content.tool_calls is None
|
||||||
|
or all(tool_call.external for tool_call in content.tool_calls)
|
||||||
|
):
|
||||||
return
|
return
|
||||||
|
|
||||||
if self.llm_api is None:
|
if self.llm_api is None:
|
||||||
@@ -263,13 +288,16 @@ class ChatLog:
|
|||||||
if tool_call_tasks is None:
|
if tool_call_tasks is None:
|
||||||
tool_call_tasks = {}
|
tool_call_tasks = {}
|
||||||
for tool_input in content.tool_calls:
|
for tool_input in content.tool_calls:
|
||||||
if tool_input.id not in tool_call_tasks:
|
if tool_input.id not in tool_call_tasks and not tool_input.external:
|
||||||
tool_call_tasks[tool_input.id] = self.hass.async_create_task(
|
tool_call_tasks[tool_input.id] = self.hass.async_create_task(
|
||||||
self.llm_api.async_call_tool(tool_input),
|
self.llm_api.async_call_tool(tool_input),
|
||||||
name=f"llm_tool_{tool_input.id}",
|
name=f"llm_tool_{tool_input.id}",
|
||||||
)
|
)
|
||||||
|
|
||||||
for tool_input in content.tool_calls:
|
for tool_input in content.tool_calls:
|
||||||
|
if tool_input.external:
|
||||||
|
continue
|
||||||
|
|
||||||
LOGGER.debug(
|
LOGGER.debug(
|
||||||
"Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args
|
"Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args
|
||||||
)
|
)
|
||||||
@@ -292,7 +320,9 @@ class ChatLog:
|
|||||||
yield response_content
|
yield response_content
|
||||||
|
|
||||||
async def async_add_delta_content_stream(
|
async def async_add_delta_content_stream(
|
||||||
self, agent_id: str, stream: AsyncIterable[AssistantContentDeltaDict]
|
self,
|
||||||
|
agent_id: str,
|
||||||
|
stream: AsyncIterable[AssistantContentDeltaDict | ToolResultContentDeltaDict],
|
||||||
) -> AsyncGenerator[AssistantContent | ToolResultContent]:
|
) -> AsyncGenerator[AssistantContent | ToolResultContent]:
|
||||||
"""Stream content into the chat log.
|
"""Stream content into the chat log.
|
||||||
|
|
||||||
@@ -306,6 +336,8 @@ class ChatLog:
|
|||||||
The keys content and tool_calls will be concatenated if they appear multiple times.
|
The keys content and tool_calls will be concatenated if they appear multiple times.
|
||||||
"""
|
"""
|
||||||
current_content = ""
|
current_content = ""
|
||||||
|
current_thinking_content = ""
|
||||||
|
current_native: Any = None
|
||||||
current_tool_calls: list[llm.ToolInput] = []
|
current_tool_calls: list[llm.ToolInput] = []
|
||||||
tool_call_tasks: dict[str, asyncio.Task] = {}
|
tool_call_tasks: dict[str, asyncio.Task] = {}
|
||||||
|
|
||||||
@@ -314,34 +346,54 @@

             # Indicates update to current message
             if "role" not in delta:
-                if delta_content := delta.get("content"):
+                # ToolResultContentDeltaDict will always have a role
+                assistant_delta = cast(AssistantContentDeltaDict, delta)
+                if delta_content := assistant_delta.get("content"):
                     current_content += delta_content
-                if delta_tool_calls := delta.get("tool_calls"):
-                    if self.llm_api is None:
-                        raise ValueError("No LLM API configured")
+                if delta_thinking_content := assistant_delta.get("thinking_content"):
+                    current_thinking_content += delta_thinking_content
+                if delta_native := assistant_delta.get("native"):
+                    if current_native is not None:
+                        raise RuntimeError(
+                            "Native content already set, cannot overwrite"
+                        )
+                    current_native = delta_native
+                if delta_tool_calls := assistant_delta.get("tool_calls"):
                     current_tool_calls += delta_tool_calls

                     # Start processing the tool calls as soon as we know about them
                     for tool_call in delta_tool_calls:
-                        tool_call_tasks[tool_call.id] = self.hass.async_create_task(
-                            self.llm_api.async_call_tool(tool_call),
-                            name=f"llm_tool_{tool_call.id}",
-                        )
+                        if not tool_call.external:
+                            if self.llm_api is None:
+                                raise ValueError("No LLM API configured")
+                            tool_call_tasks[tool_call.id] = self.hass.async_create_task(
+                                self.llm_api.async_call_tool(tool_call),
+                                name=f"llm_tool_{tool_call.id}",
+                            )
                 if self.delta_listener:
-                    self.delta_listener(self, delta)  # type: ignore[arg-type]
+                    if filtered_delta := {
+                        k: v for k, v in assistant_delta.items() if k != "native"
+                    }:
+                        # We do not want to send the native content to the listener
+                        # as it is not JSON serializable
+                        self.delta_listener(self, filtered_delta)
                 continue

             # Starting a new message

-            if delta["role"] != "assistant":
-                raise ValueError(f"Only assistant role expected. Got {delta['role']}")
-
             # Yield the previous message if it has content
-            if current_content or current_tool_calls:
-                content = AssistantContent(
+            if (
+                current_content
+                or current_thinking_content
+                or current_tool_calls
+                or current_native
+            ):
+                content: AssistantContent | ToolResultContent = AssistantContent(
                     agent_id=agent_id,
                     content=current_content or None,
+                    thinking_content=current_thinking_content or None,
                     tool_calls=current_tool_calls or None,
+                    native=current_native,
                 )
                 yield content
                 async for tool_result in self.async_add_assistant_content(
@@ -350,18 +402,51 @@
                     yield tool_result
                     if self.delta_listener:
                         self.delta_listener(self, asdict(tool_result))
+                current_content = ""
+                current_thinking_content = ""
+                current_native = None
+                current_tool_calls = []

-            current_content = delta.get("content") or ""
-            current_tool_calls = delta.get("tool_calls") or []
+            if delta["role"] == "assistant":
+                current_content = delta.get("content") or ""
+                current_thinking_content = delta.get("thinking_content") or ""
+                current_tool_calls = delta.get("tool_calls") or []
+                current_native = delta.get("native")

-            if self.delta_listener:
-                self.delta_listener(self, delta)  # type: ignore[arg-type]
+                if self.delta_listener:
+                    if filtered_delta := {
+                        k: v for k, v in delta.items() if k != "native"
+                    }:
+                        self.delta_listener(self, filtered_delta)
+            elif delta["role"] == "tool_result":
+                content = ToolResultContent(
+                    agent_id=agent_id,
+                    tool_call_id=delta["tool_call_id"],
+                    tool_name=delta["tool_name"],
+                    tool_result=delta["tool_result"],
+                )
+                yield content
+                if self.delta_listener:
+                    self.delta_listener(self, asdict(content))
+                self.async_add_assistant_content_without_tools(content)
+            else:
+                raise ValueError(
+                    "Only assistant and tool_result roles expected."
+                    f" Got {delta['role']}"
+                )

-        if current_content or current_tool_calls:
+        if (
+            current_content
+            or current_thinking_content
+            or current_tool_calls
+            or current_native
+        ):
             content = AssistantContent(
                 agent_id=agent_id,
                 content=current_content or None,
+                thinking_content=current_thinking_content or None,
                 tool_calls=current_tool_calls or None,
+                native=current_native,
             )
             yield content
             async for tool_result in self.async_add_assistant_content(
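Taken together, the streaming changes mean a delta stream now carries three kinds of items: role-less dicts that extend the current assistant message (content, thinking_content, tool_calls, plus a one-shot native payload), a new "assistant" role that starts the next message, and a "tool_result" role that is emitted as a complete message by itself. A rough, self-contained sketch of those accumulation rules on plain dicts; the real code also schedules tool calls, handles native payloads, and notifies the delta listener, all omitted here:

from typing import Any


def collapse_deltas(deltas: list[dict[str, Any]]) -> list[dict[str, Any]]:
    """Collapse a delta stream into finished messages (sketch only)."""
    messages: list[dict[str, Any]] = []
    current: dict[str, Any] | None = None

    def flush() -> None:
        nonlocal current
        if current and (
            current["content"] or current["thinking_content"] or current["tool_calls"]
        ):
            messages.append(current)
        current = None

    for delta in deltas:
        if "role" not in delta:
            # Update to the current assistant message
            assert current is not None, "stream must start with a role"
            current["content"] += delta.get("content", "")
            current["thinking_content"] += delta.get("thinking_content", "")
            current["tool_calls"] += delta.get("tool_calls", [])
        elif delta["role"] == "assistant":
            flush()  # yield the previous message if it has content
            current = {
                "role": "assistant",
                "content": delta.get("content", ""),
                "thinking_content": delta.get("thinking_content", ""),
                "tool_calls": delta.get("tool_calls", []),
            }
        elif delta["role"] == "tool_result":
            flush()
            messages.append(dict(delta))  # tool results are complete as-is
        else:
            raise ValueError(f"unexpected role: {delta['role']}")
    flush()
    return messages


stream: list[dict[str, Any]] = [
    {"role": "assistant"},
    {"thinking_content": "Checking the light... "},
    {"content": "The light is "},
    {"content": "on."},
    {"role": "tool_result", "tool_call_id": "1", "tool_name": "demo", "tool_result": {}},
]
out = collapse_deltas(stream)
assert out[0]["content"] == "The light is on."
assert out[1]["role"] == "tool_result"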
@@ -14,14 +14,19 @@ import re
 import time
 from typing import IO, Any, cast

-from hassil.expression import Expression, ListReference, Sequence, TextChunk
+from hassil.expression import Expression, Group, ListReference, TextChunk
+from hassil.fuzzy import FuzzyNgramMatcher, SlotCombinationInfo
 from hassil.intents import (
+    Intent,
+    IntentData,
     Intents,
     SlotList,
     TextSlotList,
     TextSlotValue,
     WildcardSlotList,
 )
+from hassil.models import MatchEntity
+from hassil.ngram import Sqlite3NgramModel
 from hassil.recognize import (
     MISSING_ENTITY,
     RecognizeResult,
@@ -31,7 +36,15 @@ from hassil.recognize import (
 from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
 from hassil.trie import Trie
 from hassil.util import merge_dict
-from home_assistant_intents import ErrorKey, get_intents, get_languages
+from home_assistant_intents import (
+    ErrorKey,
+    FuzzyConfig,
+    FuzzyLanguageResponses,
+    get_fuzzy_config,
+    get_fuzzy_language,
+    get_intents,
+    get_languages,
+)
 import yaml

 from homeassistant import core
@@ -76,6 +89,7 @@ TRIGGER_CALLBACK_TYPE = Callable[
 ]
 METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
 METADATA_CUSTOM_FILE = "hass_custom_file"
+METADATA_FUZZY_MATCH = "hass_fuzzy_match"

 ERROR_SENTINEL = object()

@@ -94,6 +108,8 @@ class LanguageIntents:
     intent_responses: dict[str, Any]
     error_responses: dict[str, Any]
     language_variant: str | None
+    fuzzy_matcher: FuzzyNgramMatcher | None = None
+    fuzzy_responses: FuzzyLanguageResponses | None = None


 @dataclass(slots=True)
@@ -119,10 +135,13 @@ class IntentMatchingStage(Enum):
     EXPOSED_ENTITIES_ONLY = auto()
     """Match against exposed entities only."""

+    FUZZY = auto()
+    """Use fuzzy matching to guess intent."""
+
     UNEXPOSED_ENTITIES = auto()
     """Match against unexposed entities in Home Assistant."""

-    FUZZY = auto()
+    UNKNOWN_NAMES = auto()
     """Capture names that are not known to Home Assistant."""


@@ -241,6 +260,10 @@ class DefaultAgent(ConversationEntity):
         # LRU cache to avoid unnecessary intent matching
         self._intent_cache = IntentCache(capacity=128)

+        # Shared configuration for fuzzy matching
+        self.fuzzy_matching = True
+        self._fuzzy_config: FuzzyConfig | None = None
+
     @property
     def supported_languages(self) -> list[str]:
         """Return a list of supported languages."""
@@ -299,7 +322,7 @@ class DefaultAgent(ConversationEntity):
             _LOGGER.warning("No intents were loaded for language: %s", language)
             return None

-        slot_lists = self._make_slot_lists()
+        slot_lists = await self._make_slot_lists()
         intent_context = self._make_intent_context(user_input)

         if self._exposed_names_trie is not None:
@@ -556,6 +579,36 @@ class DefaultAgent(ConversationEntity):
             # Don't try matching against all entities or doing a fuzzy match
             return None

+        # Use fuzzy matching
+        skip_fuzzy_match = False
+        if cache_value is not None:
+            if (cache_value.result is not None) and (
+                cache_value.stage == IntentMatchingStage.FUZZY
+            ):
+                _LOGGER.debug("Got cached result for fuzzy match")
+                return cache_value.result
+
+            # Continue with matching, but we know we won't succeed for fuzzy
+            # match.
+            skip_fuzzy_match = True
+
+        if (not skip_fuzzy_match) and self.fuzzy_matching:
+            start_time = time.monotonic()
+            fuzzy_result = self._recognize_fuzzy(lang_intents, user_input)
+
+            # Update cache
+            self._intent_cache.put(
+                cache_key,
+                IntentCacheValue(result=fuzzy_result, stage=IntentMatchingStage.FUZZY),
+            )
+
+            _LOGGER.debug(
+                "Did fuzzy match in %s second(s)", time.monotonic() - start_time
+            )
+
+            if fuzzy_result is not None:
+                return fuzzy_result
+
         # Try again with all entities (including unexposed)
         skip_unexposed_entities_match = False
         if cache_value is not None:
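With the new FUZZY stage, the default agent's pipeline tries exact matching against exposed entities, then fuzzy n-gram matching, then unexposed entities, and finally unknown-name capture, caching the stage at which each utterance was resolved so a repeated utterance can return early or skip stages known to fail. A schematic sketch of that stage-plus-cache control flow; only the stage names come from the diff, everything else is illustrative:

from collections.abc import Callable
from enum import IntEnum


class Stage(IntEnum):
    """Matching stages in pipeline order (names from the diff)."""

    EXPOSED_ENTITIES_ONLY = 1
    FUZZY = 2
    UNEXPOSED_ENTITIES = 3
    UNKNOWN_NAMES = 4


Matcher = Callable[[str], object | None]
# One cache entry per utterance: the last stage tried and its result.
_cache: dict[str, tuple[Stage, object | None]] = {}


def recognize(text: str, matchers: dict[Stage, Matcher]) -> object | None:
    """Run stages in order, reusing cached hits and skipping cached misses."""
    cached = _cache.get(text)
    for stage in Stage:
        if cached is not None:
            cached_stage, cached_result = cached
            if cached_result is not None and cached_stage is stage:
                return cached_result  # cached hit at exactly this stage
            if cached_stage >= stage:
                continue  # an earlier run already got past this stage
        result = matchers[stage](text)
        _cache[text] = (stage, result)
        if result is not None:
            return result
    return None


matchers: dict[Stage, Matcher] = {stage: (lambda text: None) for stage in Stage}
matchers[Stage.FUZZY] = lambda text: "fuzzy-hit"
assert recognize("turn on the lite", matchers) == "fuzzy-hit"
# Second call is answered straight from the cache at the FUZZY stage.
assert recognize("turn on the lite", matchers) == "fuzzy-hit"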
@@ -601,102 +654,160 @@
             # This should fail the intent handling phase (async_match_targets).
             return strict_result

-        # Try again with missing entities enabled
-        skip_fuzzy_match = False
+        # Check unknown names
+        skip_unknown_names = False
         if cache_value is not None:
             if (cache_value.result is not None) and (
-                cache_value.stage == IntentMatchingStage.FUZZY
+                cache_value.stage == IntentMatchingStage.UNKNOWN_NAMES
             ):
-                _LOGGER.debug("Got cached result for fuzzy match")
+                _LOGGER.debug("Got cached result for unknown names")
                 return cache_value.result

-            # We know we won't succeed for fuzzy matching.
-            skip_fuzzy_match = True
+            skip_unknown_names = True

         maybe_result: RecognizeResult | None = None
-        if not skip_fuzzy_match:
+        if not skip_unknown_names:
             start_time = time.monotonic()
-            best_num_matched_entities = 0
-            best_num_unmatched_entities = 0
-            best_num_unmatched_ranges = 0
-            for result in recognize_all(
-                user_input.text,
-                lang_intents.intents,
-                slot_lists=slot_lists,
-                intent_context=intent_context,
-                allow_unmatched_entities=True,
-            ):
-                if result.text_chunks_matched < 1:
-                    # Skip results that don't match any literal text
-                    continue
-
-                # Don't count missing entities that couldn't be filled from context
-                num_matched_entities = 0
-                for matched_entity in result.entities_list:
-                    if matched_entity.name not in result.unmatched_entities:
-                        num_matched_entities += 1
-
-                num_unmatched_entities = 0
-                num_unmatched_ranges = 0
-                for unmatched_entity in result.unmatched_entities_list:
-                    if isinstance(unmatched_entity, UnmatchedTextEntity):
-                        if unmatched_entity.text != MISSING_ENTITY:
-                            num_unmatched_entities += 1
-                    elif isinstance(unmatched_entity, UnmatchedRangeEntity):
-                        num_unmatched_ranges += 1
-                        num_unmatched_entities += 1
-                    else:
-                        num_unmatched_entities += 1
-
-                if (
-                    (maybe_result is None)  # first result
-                    or (
-                        # More literal text matched
-                        result.text_chunks_matched > maybe_result.text_chunks_matched
-                    )
-                    or (
-                        # More entities matched
-                        num_matched_entities > best_num_matched_entities
-                    )
-                    or (
-                        # Fewer unmatched entities
-                        (num_matched_entities == best_num_matched_entities)
-                        and (num_unmatched_entities < best_num_unmatched_entities)
-                    )
-                    or (
-                        # Prefer unmatched ranges
-                        (num_matched_entities == best_num_matched_entities)
-                        and (num_unmatched_entities == best_num_unmatched_entities)
-                        and (num_unmatched_ranges > best_num_unmatched_ranges)
-                    )
-                    or (
-                        # Prefer match failures with entities
-                        (result.text_chunks_matched == maybe_result.text_chunks_matched)
-                        and (num_unmatched_entities == best_num_unmatched_entities)
-                        and (num_unmatched_ranges == best_num_unmatched_ranges)
-                        and (
-                            ("name" in result.entities)
-                            or ("name" in result.unmatched_entities)
-                        )
-                    )
-                ):
-                    maybe_result = result
-                    best_num_matched_entities = num_matched_entities
-                    best_num_unmatched_entities = num_unmatched_entities
-                    best_num_unmatched_ranges = num_unmatched_ranges
+            maybe_result = self._recognize_unknown_names(
+                lang_intents, user_input, slot_lists, intent_context
+            )

             # Update cache
             self._intent_cache.put(
                 cache_key,
-                IntentCacheValue(result=maybe_result, stage=IntentMatchingStage.FUZZY),
+                IntentCacheValue(
+                    result=maybe_result, stage=IntentMatchingStage.UNKNOWN_NAMES
+                ),
             )

             _LOGGER.debug(
-                "Did fuzzy match in %s second(s)", time.monotonic() - start_time
+                "Did unknown names match in %s second(s)", time.monotonic() - start_time
             )

         return maybe_result

+    def _recognize_fuzzy(
+        self, lang_intents: LanguageIntents, user_input: ConversationInput
+    ) -> RecognizeResult | None:
+        """Return fuzzy recognition from hassil."""
+        if lang_intents.fuzzy_matcher is None:
+            return None
+
+        fuzzy_result = lang_intents.fuzzy_matcher.match(user_input.text)
+        if fuzzy_result is None:
+            return None
+
+        response = "default"
+        if lang_intents.fuzzy_responses:
+            domain = ""  # no domain
+            if "name" in fuzzy_result.slots:
+                domain = fuzzy_result.name_domain
+            elif "domain" in fuzzy_result.slots:
+                domain = fuzzy_result.slots["domain"].value
+
+            slot_combo = tuple(sorted(fuzzy_result.slots))
+            if (
+                intent_responses := lang_intents.fuzzy_responses.get(
+                    fuzzy_result.intent_name
+                )
+            ) and (combo_responses := intent_responses.get(slot_combo)):
+                response = combo_responses.get(domain, response)
+
+        entities = [
+            MatchEntity(name=slot_name, value=slot_value.value, text=slot_value.text)
+            for slot_name, slot_value in fuzzy_result.slots.items()
+        ]
+
+        return RecognizeResult(
+            intent=Intent(name=fuzzy_result.intent_name),
+            intent_data=IntentData(sentence_texts=[]),
+            intent_metadata={METADATA_FUZZY_MATCH: True},
+            entities={entity.name: entity for entity in entities},
+            entities_list=entities,
+            response=response,
+        )
+
+    def _recognize_unknown_names(
+        self,
+        lang_intents: LanguageIntents,
+        user_input: ConversationInput,
+        slot_lists: dict[str, SlotList],
+        intent_context: dict[str, Any] | None,
+    ) -> RecognizeResult | None:
+        """Return result with unknown names for an error message."""
+        maybe_result: RecognizeResult | None = None
+
+        best_num_matched_entities = 0
+        best_num_unmatched_entities = 0
+        best_num_unmatched_ranges = 0
+        for result in recognize_all(
+            user_input.text,
+            lang_intents.intents,
+            slot_lists=slot_lists,
+            intent_context=intent_context,
+            allow_unmatched_entities=True,
+        ):
+            if result.text_chunks_matched < 1:
+                # Skip results that don't match any literal text
+                continue
+
+            # Don't count missing entities that couldn't be filled from context
+            num_matched_entities = 0
+            for matched_entity in result.entities_list:
+                if matched_entity.name not in result.unmatched_entities:
+                    num_matched_entities += 1
+
+            num_unmatched_entities = 0
+            num_unmatched_ranges = 0
+            for unmatched_entity in result.unmatched_entities_list:
+                if isinstance(unmatched_entity, UnmatchedTextEntity):
+                    if unmatched_entity.text != MISSING_ENTITY:
+                        num_unmatched_entities += 1
+                elif isinstance(unmatched_entity, UnmatchedRangeEntity):
+                    num_unmatched_ranges += 1
+                    num_unmatched_entities += 1
+                else:
+                    num_unmatched_entities += 1
+
+            if (
+                (maybe_result is None)  # first result
+                or (
+                    # More literal text matched
+                    result.text_chunks_matched > maybe_result.text_chunks_matched
+                )
+                or (
+                    # More entities matched
+                    num_matched_entities > best_num_matched_entities
+                )
+                or (
+                    # Fewer unmatched entities
+                    (num_matched_entities == best_num_matched_entities)
+                    and (num_unmatched_entities < best_num_unmatched_entities)
+                )
+                or (
+                    # Prefer unmatched ranges
+                    (num_matched_entities == best_num_matched_entities)
+                    and (num_unmatched_entities == best_num_unmatched_entities)
+                    and (num_unmatched_ranges > best_num_unmatched_ranges)
+                )
+                or (
+                    # Prefer match failures with entities
+                    (result.text_chunks_matched == maybe_result.text_chunks_matched)
+                    and (num_unmatched_entities == best_num_unmatched_entities)
+                    and (num_unmatched_ranges == best_num_unmatched_ranges)
+                    and (
+                        ("name" in result.entities)
+                        or ("name" in result.unmatched_entities)
+                    )
+                )
+            ):
+                maybe_result = result
+                best_num_matched_entities = num_matched_entities
+                best_num_unmatched_entities = num_unmatched_entities
+                best_num_unmatched_ranges = num_unmatched_ranges
+
+        return maybe_result
+
     def _get_unexposed_entity_names(self, text: str) -> TextSlotList:
         """Get filtered slot list with unexposed entity names in Home Assistant."""
         if self._unexposed_names_trie is None:
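_recognize_fuzzy wraps whatever the language's FuzzyNgramMatcher returns in a hassil RecognizeResult whose metadata carries METADATA_FUZZY_MATCH: True, which is how downstream consumers (such as the debug websocket later in this diff) can tell an inexact match apart from an exact one. A small stand-alone illustration of that tagging pattern; Result below is a simplified stand-in for hassil's RecognizeResult, not the real class:

# Illustration of tagging inexact matches via metadata, mirroring
# the METADATA_FUZZY_MATCH constant used above.
from dataclasses import dataclass, field
from typing import Any

METADATA_FUZZY_MATCH = "hass_fuzzy_match"


@dataclass
class Result:
    intent_name: str
    slots: dict[str, Any]
    metadata: dict[str, Any] = field(default_factory=dict)

    @property
    def is_fuzzy(self) -> bool:
        # Missing key means an exact match; the flag is opt-in
        return bool(self.metadata.get(METADATA_FUZZY_MATCH, False))


exact = Result("HassTurnOn", {"name": "kitchen light"})
fuzzy = Result("HassTurnOn", {"name": "kitchen lite"}, {METADATA_FUZZY_MATCH: True})
assert not exact.is_fuzzy and fuzzy.is_fuzzy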
@@ -851,7 +962,7 @@ class DefaultAgent(ConversationEntity):
         if lang_intents is None:
             return

-        self._make_slot_lists()
+        await self._make_slot_lists()

     async def async_get_or_load_intents(self, language: str) -> LanguageIntents | None:
         """Load all intents of a language with lock."""
@@ -1002,12 +1113,85 @@ class DefaultAgent(ConversationEntity):
         intent_responses = responses_dict.get("intents", {})
         error_responses = responses_dict.get("errors", {})

+        if not self.fuzzy_matching:
+            _LOGGER.debug("Fuzzy matching is disabled")
+            return LanguageIntents(
+                intents,
+                intents_dict,
+                intent_responses,
+                error_responses,
+                language_variant,
+            )
+
+        # Load fuzzy
+        fuzzy_info = get_fuzzy_language(language_variant, json_load=json_load)
+        if fuzzy_info is None:
+            _LOGGER.debug(
+                "Fuzzy matching not available for language: %s", language_variant
+            )
+            return LanguageIntents(
+                intents,
+                intents_dict,
+                intent_responses,
+                error_responses,
+                language_variant,
+            )
+
+        if self._fuzzy_config is None:
+            # Load shared config
+            self._fuzzy_config = get_fuzzy_config(json_load=json_load)
+            _LOGGER.debug("Loaded shared fuzzy matching config")
+
+        assert self._fuzzy_config is not None
+
+        fuzzy_matcher: FuzzyNgramMatcher | None = None
+        fuzzy_responses: FuzzyLanguageResponses | None = None
+
+        start_time = time.monotonic()
+        fuzzy_responses = fuzzy_info.responses
+        fuzzy_matcher = FuzzyNgramMatcher(
+            intents=intents,
+            intent_models={
+                intent_name: Sqlite3NgramModel(
+                    order=fuzzy_model.order,
+                    words={
+                        word: str(word_id)
+                        for word, word_id in fuzzy_model.words.items()
+                    },
+                    database_path=fuzzy_model.database_path,
+                )
+                for intent_name, fuzzy_model in fuzzy_info.ngram_models.items()
+            },
+            intent_slot_list_names=self._fuzzy_config.slot_list_names,
+            slot_combinations={
+                intent_name: {
+                    combo_key: [
+                        SlotCombinationInfo(
+                            name_domains=(set(name_domains) if name_domains else None)
+                        )
+                    ]
+                    for combo_key, name_domains in intent_combos.items()
+                }
+                for intent_name, intent_combos in self._fuzzy_config.slot_combinations.items()
+            },
+            domain_keywords=fuzzy_info.domain_keywords,
+            stop_words=fuzzy_info.stop_words,
+        )
+        _LOGGER.debug(
+            "Loaded fuzzy matcher in %s second(s): language=%s, intents=%s",
+            time.monotonic() - start_time,
+            language_variant,
+            sorted(fuzzy_matcher.intent_models.keys()),
+        )
+
         return LanguageIntents(
             intents,
             intents_dict,
             intent_responses,
             error_responses,
             language_variant,
+            fuzzy_matcher=fuzzy_matcher,
+            fuzzy_responses=fuzzy_responses,
         )

     @core.callback
@@ -1027,8 +1211,7 @@ class DefaultAgent(ConversationEntity):
         # Slot lists have changed, so we must clear the cache
         self._intent_cache.clear()

-    @core.callback
-    def _make_slot_lists(self) -> dict[str, SlotList]:
+    async def _make_slot_lists(self) -> dict[str, SlotList]:
         """Create slot lists with areas and entity names/aliases."""
         if self._slot_lists is not None:
             return self._slot_lists
@@ -1089,6 +1272,10 @@ class DefaultAgent(ConversationEntity):
             "floor": TextSlotList.from_tuples(floor_names, allow_template=False),
         }

+        # Reload fuzzy matchers with new slot lists
+        if self.fuzzy_matching:
+            await self.hass.async_add_executor_job(self._load_fuzzy_matchers)
+
         self._listen_clear_slot_list()

         _LOGGER.debug(
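Rebuilding the fuzzy matchers is blocking, CPU-bound work, so _make_slot_lists hands it to hass.async_add_executor_job, Home Assistant's standard wrapper for running blocking code in the executor thread pool instead of on the event loop (which is also why the method had to become async, as the previous hunk shows). The generic shape of that pattern, with a hypothetical rebuild_matchers standing in for _load_fuzzy_matchers:

import asyncio
import time


def rebuild_matchers() -> str:
    """Pretend to rebuild n-gram matchers (blocking, CPU-bound)."""
    time.sleep(0.1)
    return "rebuilt"


async def main() -> None:
    loop = asyncio.get_running_loop()
    # hass.async_add_executor_job(func, *args) wraps the same idea:
    # run blocking work in a thread pool, await the result.
    result = await loop.run_in_executor(None, rebuild_matchers)
    print(result)


asyncio.run(main())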
@@ -1098,6 +1285,25 @@ class DefaultAgent(ConversationEntity):

         return self._slot_lists

+    def _load_fuzzy_matchers(self) -> None:
+        """Reload fuzzy matchers for all loaded languages."""
+        for lang_intents in self._lang_intents.values():
+            if (not isinstance(lang_intents, LanguageIntents)) or (
+                lang_intents.fuzzy_matcher is None
+            ):
+                continue
+
+            lang_matcher = lang_intents.fuzzy_matcher
+            lang_intents.fuzzy_matcher = FuzzyNgramMatcher(
+                intents=lang_matcher.intents,
+                intent_models=lang_matcher.intent_models,
+                intent_slot_list_names=lang_matcher.intent_slot_list_names,
+                slot_combinations=lang_matcher.slot_combinations,
+                domain_keywords=lang_matcher.domain_keywords,
+                stop_words=lang_matcher.stop_words,
+                slot_lists=self._slot_lists,
+            )
+
     def _make_intent_context(
         self, user_input: ConversationInput
     ) -> dict[str, Any] | None:
@@ -1183,7 +1389,7 @@ class DefaultAgent(ConversationEntity):
     for trigger_intent in trigger_intents.intents.values():
         for intent_data in trigger_intent.data:
             for sentence in intent_data.sentences:
-                _collect_list_references(sentence, wildcard_names)
+                _collect_list_references(sentence.expression, wildcard_names)

     for wildcard_name in wildcard_names:
         trigger_intents.slot_lists[wildcard_name] = WildcardSlotList(wildcard_name)
@@ -1520,11 +1726,9 @@ def _get_match_error_response(

 def _collect_list_references(expression: Expression, list_names: set[str]) -> None:
     """Collect list reference names recursively."""
-    if isinstance(expression, Sequence):
-        seq: Sequence = expression
-        for item in seq.items:
+    if isinstance(expression, Group):
+        for item in expression.items:
             _collect_list_references(item, list_names)
     elif isinstance(expression, ListReference):
         # {list}
-        list_ref: ListReference = expression
-        list_names.add(list_ref.slot_name)
+        list_names.add(expression.slot_name)
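This helper tracks the hassil 3 API: the Sequence expression type is replaced by Group (the import hunk earlier in this diff makes the same swap), and the explicit local typed aliases are no longer needed. The same recursive walk, sketched over stand-in expression classes so it runs without hassil installed:

# Stand-in expression classes; in hassil 3 these would be
# hassil.expression.Expression / Group / ListReference / TextChunk.
from dataclasses import dataclass, field


class Expression:
    """Base class for all expression nodes."""


@dataclass
class TextChunk(Expression):
    text: str


@dataclass
class ListReference(Expression):
    slot_name: str  # the {list} being referenced


@dataclass
class Group(Expression):
    items: list[Expression] = field(default_factory=list)


def collect_list_references(expression: Expression, list_names: set[str]) -> None:
    """Collect {list} slot names recursively, as in the diff above."""
    if isinstance(expression, Group):
        for item in expression.items:
            collect_list_references(item, list_names)
    elif isinstance(expression, ListReference):
        list_names.add(expression.slot_name)


names: set[str] = set()
sentence = Group([TextChunk("turn on "), ListReference("name")])
collect_list_references(sentence, names)
assert names == {"name"}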
@@ -26,7 +26,11 @@ from .agent_manager import (
     get_agent_manager,
 )
 from .const import DATA_COMPONENT, DATA_DEFAULT_ENTITY
-from .default_agent import METADATA_CUSTOM_FILE, METADATA_CUSTOM_SENTENCE
+from .default_agent import (
+    METADATA_CUSTOM_FILE,
+    METADATA_CUSTOM_SENTENCE,
+    METADATA_FUZZY_MATCH,
+)
 from .entity import ConversationEntity
 from .models import ConversationInput

@@ -240,6 +244,8 @@ async def websocket_hass_agent_debug(
         "sentence_template": "",
         # When match is incomplete, this will contain the best slot guesses
         "unmatched_slots": _get_unmatched_slots(intent_result),
+        # True if match was not exact
+        "fuzzy_match": False,
     }

     if successful_match:
@@ -251,16 +257,19 @@
         if intent_result.intent_sentence is not None:
             result_dict["sentence_template"] = intent_result.intent_sentence.text

-        # Inspect metadata to determine if this matched a custom sentence
-        if intent_result.intent_metadata and intent_result.intent_metadata.get(
-            METADATA_CUSTOM_SENTENCE
-        ):
-            result_dict["source"] = "custom"
-            result_dict["file"] = intent_result.intent_metadata.get(
-                METADATA_CUSTOM_FILE
-            )
-        else:
-            result_dict["source"] = "builtin"
+        if intent_result.intent_metadata:
+            # Inspect metadata to determine if this matched a custom sentence
+            if intent_result.intent_metadata.get(METADATA_CUSTOM_SENTENCE):
+                result_dict["source"] = "custom"
+                result_dict["file"] = intent_result.intent_metadata.get(
+                    METADATA_CUSTOM_FILE
+                )
+            else:
+                result_dict["source"] = "builtin"
+
+            result_dict["fuzzy_match"] = intent_result.intent_metadata.get(
+                METADATA_FUZZY_MATCH, False
+            )

         result_dicts.append(result_dict)

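Every debug result from this websocket command now reports whether the match was exact. A hand-written illustration (not a captured payload) of the fields this hunk touches:

# Illustrative shape only; keys limited to those visible in the diff.
result_dict = {
    "sentence_template": "",
    "unmatched_slots": {},  # best slot guesses for incomplete matches
    "fuzzy_match": True,    # True if match was not exact
    "source": "builtin",    # "custom" when a custom sentence matched
}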
@@ -6,5 +6,5 @@
   "documentation": "https://www.home-assistant.io/integrations/conversation",
   "integration_type": "system",
   "quality_scale": "internal",
-  "requirements": ["hassil==2.2.3", "home-assistant-intents==2025.7.30"]
+  "requirements": ["hassil==3.1.0", "home-assistant-intents==2025.7.30"]
 }
@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["cookidoo_api"],
   "quality_scale": "silver",
-  "requirements": ["cookidoo-api==0.12.2"]
+  "requirements": ["cookidoo-api==0.14.0"]
 }
@@ -5,6 +5,7 @@ from __future__ import annotations
 from datetime import datetime
 from typing import Any

+from homeassistant.components import media_source
 from homeassistant.components.media_player import (
     BrowseMedia,
     MediaClass,
@@ -396,6 +397,15 @@ class DemoBrowsePlayer(AbstractDemoPlayer):

     _attr_supported_features = BROWSE_PLAYER_SUPPORT

+    async def async_browse_media(
+        self,
+        media_content_type: MediaType | str | None = None,
+        media_content_id: str | None = None,
+    ) -> BrowseMedia:
+        """Implement the websocket media browsing helper."""
+
+        return await media_source.async_browse_media(self.hass, media_content_id)
+

 class DemoGroupPlayer(AbstractDemoPlayer):
     """A Demo media player that supports grouping."""
@@ -6,7 +6,7 @@
   "documentation": "https://www.home-assistant.io/integrations/denonavr",
   "iot_class": "local_push",
   "loggers": ["denonavr"],
-  "requirements": ["denonavr==1.1.1"],
+  "requirements": ["denonavr==1.1.2"],
   "ssdp": [
     {
       "manufacturer": "Denon",
@@ -61,7 +61,7 @@ class DeviceCondition(Condition):
         self._hass = hass

     @classmethod
-    async def async_validate_condition_config(
+    async def async_validate_config(
         cls, hass: HomeAssistant, config: ConfigType
     ) -> ConfigType:
         """Validate device condition config."""
@@ -69,7 +69,7 @@ class DeviceCondition(Condition):
             hass, config, cv.DEVICE_CONDITION_SCHEMA, DeviceAutomationType.CONDITION
         )

-    async def async_condition_from_config(self) -> condition.ConditionCheckerType:
+    async def async_get_checker(self) -> condition.ConditionCheckerType:
         """Test a device condition."""
         platform = await async_get_device_automation_platform(
             self._hass, self._config[CONF_DOMAIN], DeviceAutomationType.CONDITION
@@ -80,7 +80,7 @@ class DeviceCondition(Condition):


 CONDITIONS: dict[str, type[Condition]] = {
-    "device": DeviceCondition,
+    "_device": DeviceCondition,
 }


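Beyond renaming the two hooks (async_validate_condition_config becomes async_validate_config, async_condition_from_config becomes async_get_checker), note the registry key change from "device" to "_device". A minimal sketch of a condition class written against the renamed hooks; the Condition base and checker type below are reduced stand-ins inferred from this diff, not the real helper classes:

from collections.abc import Callable
from typing import Any

ConfigType = dict[str, Any]
ConditionCheckerType = Callable[[ConfigType], bool]


class Condition:
    """Stand-in base class exposing the renamed hook names."""

    @classmethod
    async def async_validate_config(cls, hass: Any, config: ConfigType) -> ConfigType:
        raise NotImplementedError

    async def async_get_checker(self) -> ConditionCheckerType:
        raise NotImplementedError


class AlwaysTrueCondition(Condition):
    """Toy condition: accepts any config, always passes."""

    @classmethod
    async def async_validate_config(cls, hass: Any, config: ConfigType) -> ConfigType:
        return config  # a real platform would validate against a schema

    async def async_get_checker(self) -> ConditionCheckerType:
        return lambda variables: True


CONDITIONS: dict[str, type[Condition]] = {"_always_true": AlwaysTrueCondition}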
@@ -15,8 +15,8 @@
   ],
   "quality_scale": "internal",
   "requirements": [
-    "aiodhcpwatcher==1.2.0",
-    "aiodiscover==2.7.0",
+    "aiodhcpwatcher==1.2.1",
+    "aiodiscover==2.7.1",
     "cached-ipaddress==0.10.0"
   ]
 }
Some files were not shown because too many files have changed in this diff.