Mirror of https://github.com/home-assistant/core.git (synced 2025-11-08 18:39:30 +00:00)

Compare commits: 2025.9.0b6 ... mqtt-json- (429 commits)
@@ -8,6 +8,8 @@
     "PYTHONASYNCIODEBUG": "1"
   },
   "features": {
+    // Node feature required for Claude Code until fixed https://github.com/anthropics/devcontainer-features/issues/28
+    "ghcr.io/devcontainers/features/node:1": {},
     "ghcr.io/anthropics/devcontainer-features/claude-code:1.0": {},
     "ghcr.io/devcontainers/features/github-cli:1": {}
   },
.github/workflows/builder.yml (vendored, 10 changed lines)

@@ -32,7 +32,7 @@ jobs:
           fetch-depth: 0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -116,7 +116,7 @@ jobs:
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         if: needs.init.outputs.channel == 'dev'
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -457,7 +457,7 @@ jobs:
         uses: actions/checkout@v5.0.0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}

@@ -480,7 +480,7 @@ jobs:
           python -m build

       - name: Upload package to PyPI
-        uses: pypa/gh-action-pypi-publish@v1.12.4
+        uses: pypa/gh-action-pypi-publish@v1.13.0
         with:
           skip-existing: true

@@ -531,7 +531,7 @@ jobs:
       - name: Generate artifact attestation
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@e8998f949152b193b063cb0ec769d69d929409be # v2.4.0
+        uses: actions/attest-build-provenance@977bb373ede98d70efdf65b84cb5f73e068dcc2a # v3.0.0
         with:
           subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
           subject-digest: ${{ steps.push.outputs.digest }}
.github/workflows/ci.yaml (vendored, 171 changed lines)

@@ -37,10 +37,10 @@ on:
       type: boolean

 env:
-  CACHE_VERSION: 7
+  CACHE_VERSION: 8
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.9"
+  HA_SHORT_VERSION: "2025.10"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version

@@ -61,6 +61,9 @@ env:
   POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
   PRE_COMMIT_CACHE: ~/.cache/pre-commit
   UV_CACHE_DIR: /tmp/uv-cache
+  APT_CACHE_BASE: /home/runner/work/apt
+  APT_CACHE_DIR: /home/runner/work/apt/cache
+  APT_LIST_CACHE_DIR: /home/runner/work/apt/lists
   SQLALCHEMY_WARN_20: 1
   PYTHONASYNCIODEBUG: 1
   HASS_CI: 1

@@ -78,6 +81,7 @@ jobs:
       core: ${{ steps.core.outputs.changes }}
       integrations_glob: ${{ steps.info.outputs.integrations_glob }}
       integrations: ${{ steps.integrations.outputs.changes }}
+      apt_cache_key: ${{ steps.generate_apt_cache_key.outputs.key }}
       pre-commit_cache_key: ${{ steps.generate_pre-commit_cache_key.outputs.key }}
       python_cache_key: ${{ steps.generate_python_cache_key.outputs.key }}
       requirements: ${{ steps.core.outputs.requirements }}

@@ -111,6 +115,10 @@ jobs:
         run: >-
           echo "key=pre-commit-${{ env.CACHE_VERSION }}-${{
             hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
+      - name: Generate partial apt restore key
+        id: generate_apt_cache_key
+        run: |
+          echo "key=$(lsb_release -rs)-apt-${{ env.CACHE_VERSION }}-${{ env.HA_SHORT_VERSION }}" >> $GITHUB_OUTPUT
       - name: Filter for core changes
         uses: dorny/paths-filter@v3.0.2
         id: core

@@ -249,7 +257,7 @@, @@ -294,7 +302,7 @@, @@ -334,7 +342,7 @@, @@ -374,7 +382,7 @@, @@ -484,7 +492,7 @@,
@@ -620,7 +667,7 @@, @@ -677,7 +724,7 @@, @@ -720,7 +767,7 @@, @@ -767,7 +814,7 @@, @@ -812,7 +859,7 @@,
@@ -889,7 +950,7 @@, @@ -950,7 +1025,7 @@, @@ -1083,7 +1172,7 @@, @@ -1225,7 +1328,7 @@ and @@ -1384,7 +1501,7 @@
each apply the same bump to the "Set up Python" step:
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v6.0.0

@@ -515,15 +523,36 @@ jobs:
           ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-uv-${{
             env.UV_CACHE_VERSION }}-${{ steps.generate-uv-key.outputs.version }}-${{
             env.HA_SHORT_VERSION }}-
+      - name: Restore apt cache
+        if: steps.cache-venv.outputs.cache-hit != 'true'
+        id: cache-apt
+        uses: actions/cache@v4.2.4
+        with:
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
       - name: Install additional OS dependencies
         if: steps.cache-venv.outputs.cache-hit != 'true'
         timeout-minutes: 10
         run: |
           sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          sudo apt-get update
+          if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
+            mkdir -p ${{ env.APT_CACHE_DIR }}
+            mkdir -p ${{ env.APT_LIST_CACHE_DIR }}
+          fi
+
+          sudo apt-get update \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
           sudo apt-get -y install \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             bluez \
             ffmpeg \
             libturbojpeg \
             libxml2-utils \
             libavcodec-dev \
             libavdevice-dev \
             libavfilter-dev \

@@ -533,6 +562,10 @@ jobs:
             libswresample-dev \
             libswscale-dev \
             libudev-dev
+
+          if [[ "${{ steps.cache-apt.outputs.cache-hit }}" != 'true' ]]; then
+            sudo chmod -R 755 ${{ env.APT_CACHE_BASE }}
+          fi
       - name: Create Python virtual environment
         if: steps.cache-venv.outputs.cache-hit != 'true'
         run: |

@@ -577,17 +610,31 @@ jobs:
       - info
       - base
     steps:
+      - name: Restore apt cache
+        uses: actions/cache/restore@v4.2.4
+        with:
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
+          fail-on-cache-miss: true
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
       - name: Install additional OS dependencies
         timeout-minutes: 10
         run: |
           sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          sudo apt-get update
+          sudo apt-get update \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
           sudo apt-get -y install \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             libturbojpeg
       - name: Check out code from GitHub
         uses: actions/checkout@v5.0.0
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true

@@ -876,11 +923,25 @@ jobs:
       - mypy
     name: Split tests for full run
     steps:
+      - name: Restore apt cache
+        uses: actions/cache/restore@v4.2.4
+        with:
+          path: |
+            ${{ env.APT_CACHE_DIR }}
+            ${{ env.APT_LIST_CACHE_DIR }}
+          fail-on-cache-miss: true
+          key: >-
+            ${{ runner.os }}-${{ runner.arch }}-${{ needs.info.outputs.apt_cache_key }}
       - name: Install additional OS dependencies
         timeout-minutes: 10
         run: |
           sudo rm /etc/apt/sources.list.d/microsoft-prod.list
-          sudo apt-get update
+          sudo apt-get update \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }}
           sudo apt-get -y install \
+            -o Dir::Cache=${{ env.APT_CACHE_DIR }} \
+            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
             bluez \
             ffmpeg \
             libturbojpeg \

@@ -936,11 +997,25 @@ ("Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})"),
@@ -1069,11 +1144,25 @@ ("Run ${{ matrix.mariadb-group }} tests Python ${{ matrix.python-version }}"),
@@ -1209,11 +1298,25 @@ ("Run ${{ matrix.postgresql-group }} tests Python ${{ matrix.python-version }}") and
@@ -1370,11 +1473,25 @@ ("Run tests Python ${{ matrix.python-version }} (${{ matrix.group }})")
add the same "Restore apt cache" step and the same apt-get cache options shown in the hunk above.

@@ -1341,7 +1444,7 @@ jobs:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.5.0
+        uses: codecov/codecov-action@v5.5.1
         with:
           fail_ci_if_error: true
           flags: full-suite

@@ -1491,7 +1608,7 @@ jobs:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.5.0
+        uses: codecov/codecov-action@v5.5.1
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/codeql.yml (vendored, 4 changed lines)

@@ -24,11 +24,11 @@ jobs:
         uses: actions/checkout@v5.0.0

       - name: Initialize CodeQL
-        uses: github/codeql-action/init@v3.29.11
+        uses: github/codeql-action/init@v3.30.3
         with:
           languages: python

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@v3.29.11
+        uses: github/codeql-action/analyze@v3.30.3
         with:
           category: "/language:python"
@@ -16,7 +16,7 @@ jobs:
     steps:
       - name: Check if integration label was added and extract details
         id: extract
-        uses: actions/github-script@v7.0.1
+        uses: actions/github-script@v8
         with:
           script: |
             // Debug: Log the event payload

@@ -113,7 +113,7 @@ jobs:
       - name: Fetch similar issues
        id: fetch_similar
        if: steps.extract.outputs.should_continue == 'true'
-        uses: actions/github-script@v7.0.1
+        uses: actions/github-script@v8
         env:
           INTEGRATION_LABELS: ${{ steps.extract.outputs.integration_labels }}
           CURRENT_NUMBER: ${{ steps.extract.outputs.current_number }}

@@ -280,7 +280,7 @@ jobs:
       - name: Post duplicate detection results
         id: post_results
         if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-        uses: actions/github-script@v7.0.1
+        uses: actions/github-script@v8
         env:
           AI_RESPONSE: ${{ steps.ai_detection.outputs.response }}
           SIMILAR_ISSUES: ${{ steps.fetch_similar.outputs.similar_issues }}

@@ -16,7 +16,7 @@ jobs:
     steps:
       - name: Check issue language
         id: detect_language
-        uses: actions/github-script@v7.0.1
+        uses: actions/github-script@v8
         env:
           ISSUE_NUMBER: ${{ github.event.issue.number }}
           ISSUE_TITLE: ${{ github.event.issue.title }}

@@ -90,7 +90,7 @@ jobs:
       - name: Process non-English issues
         if: steps.detect_language.outputs.should_continue == 'true'
-        uses: actions/github-script@v7.0.1
+        uses: actions/github-script@v8
         env:
           AI_RESPONSE: ${{ steps.ai_language_detection.outputs.response }}
           ISSUE_NUMBER: ${{ steps.detect_language.outputs.issue_number }}
.github/workflows/restrict-task-creation.yml (vendored, 2 changed lines)

@@ -12,7 +12,7 @@ jobs:
     if: github.event.issue.type.name == 'Task'
     steps:
       - name: Check if user is authorized
-        uses: actions/github-script@v7
+        uses: actions/github-script@v8
         with:
           script: |
             const issueAuthor = context.payload.issue.user.login;
.github/workflows/stale.yml (vendored, 6 changed lines)

@@ -17,7 +17,7 @@ jobs:
       # - No PRs marked as no-stale
       # - No issues (-1)
       - name: 60 days stale PRs policy
-        uses: actions/stale@v9.1.0
+        uses: actions/stale@v10.0.0
         with:
           repo-token: ${{ secrets.GITHUB_TOKEN }}
           days-before-stale: 60

@@ -57,7 +57,7 @@ jobs:
       # - No issues marked as no-stale or help-wanted
       # - No PRs (-1)
       - name: 90 days stale issues
-        uses: actions/stale@v9.1.0
+        uses: actions/stale@v10.0.0
         with:
           repo-token: ${{ steps.token.outputs.token }}
           days-before-stale: 90

@@ -87,7 +87,7 @@ jobs:
       # - No Issues marked as no-stale or help-wanted
       # - No PRs (-1)
       - name: Needs more information stale issues policy
-        uses: actions/stale@v9.1.0
+        uses: actions/stale@v10.0.0
         with:
           repo-token: ${{ steps.token.outputs.token }}
           only-labels: "needs-more-information"
.github/workflows/translations.yml (vendored, 2 changed lines)

@@ -22,7 +22,7 @@ jobs:
         uses: actions/checkout@v5.0.0

       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/wheels.yml (vendored, 2 changed lines)

@@ -36,7 +36,7 @@ jobs:
       - name: Set up Python ${{ env.DEFAULT_PYTHON }}
         id: python
-        uses: actions/setup-python@v5.6.0
+        uses: actions/setup-python@v6.0.0
         with:
           python-version: ${{ env.DEFAULT_PYTHON }}
           check-latest: true
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.12.1
+    rev: v0.13.0
     hooks:
       - id: ruff-check
         args:
@@ -169,6 +169,7 @@ homeassistant.components.dnsip.*
 homeassistant.components.doorbird.*
 homeassistant.components.dormakaba_dkey.*
 homeassistant.components.downloader.*
+homeassistant.components.droplet.*
 homeassistant.components.dsmr.*
 homeassistant.components.duckdns.*
 homeassistant.components.dunehd.*

@@ -307,6 +308,7 @@ homeassistant.components.ld2410_ble.*
 homeassistant.components.led_ble.*
 homeassistant.components.lektrico.*
 homeassistant.components.letpot.*
+homeassistant.components.libre_hardware_monitor.*
 homeassistant.components.lidarr.*
 homeassistant.components.lifx.*
 homeassistant.components.light.*

@@ -382,6 +384,7 @@ homeassistant.components.openai_conversation.*
 homeassistant.components.openexchangerates.*
 homeassistant.components.opensky.*
 homeassistant.components.openuv.*
+homeassistant.components.opnsense.*
 homeassistant.components.opower.*
 homeassistant.components.oralb.*
 homeassistant.components.otbr.*

@@ -399,6 +402,7 @@ homeassistant.components.person.*
 homeassistant.components.pi_hole.*
 homeassistant.components.ping.*
 homeassistant.components.plugwise.*
+homeassistant.components.portainer.*
 homeassistant.components.powerfox.*
 homeassistant.components.powerwall.*
 homeassistant.components.private_ble_device.*

@@ -458,6 +462,7 @@ homeassistant.components.sensorpush_cloud.*
 homeassistant.components.sensoterra.*
 homeassistant.components.senz.*
 homeassistant.components.sfr_box.*
+homeassistant.components.sftp_storage.*
 homeassistant.components.shell_command.*
 homeassistant.components.shelly.*
 homeassistant.components.shopping_list.*
CODEOWNERS (generated, 68 changed lines)

@@ -154,10 +154,10 @@ build.json @home-assistant/supervisor
 /homeassistant/components/aseko_pool_live/ @milanmeu
 /tests/components/aseko_pool_live/ @milanmeu
-/homeassistant/components/assist_pipeline/ @balloob @synesthesiam
-/tests/components/assist_pipeline/ @balloob @synesthesiam
-/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam
-/tests/components/assist_satellite/ @home-assistant/core @synesthesiam
+/homeassistant/components/assist_pipeline/ @synesthesiam @arturpragacz
+/tests/components/assist_pipeline/ @synesthesiam @arturpragacz
+/homeassistant/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
+/tests/components/assist_satellite/ @home-assistant/core @synesthesiam @arturpragacz
 /homeassistant/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi
 /tests/components/asuswrt/ @kennedyshead @ollo69 @Vaskivskyi

@@ -298,8 +298,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/control4/ @lawtancool
 /tests/components/control4/ @lawtancool
-/homeassistant/components/conversation/ @home-assistant/core @synesthesiam
-/tests/components/conversation/ @home-assistant/core @synesthesiam
+/homeassistant/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
+/tests/components/conversation/ @home-assistant/core @synesthesiam @arturpragacz
 /homeassistant/components/cookidoo/ @miaucl
 /tests/components/cookidoo/ @miaucl

@@ -377,6 +377,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer
 /tests/components/drop_connect/ @ChandlerSystems @pfrazer
+/homeassistant/components/droplet/ @sarahseidman
+/tests/components/droplet/ @sarahseidman
 /homeassistant/components/dsmr/ @Robbie1221
 /tests/components/dsmr/ @Robbie1221

@@ -464,8 +466,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/event/ @home-assistant/core
 /tests/components/event/ @home-assistant/core
-/homeassistant/components/evil_genius_labs/ @balloob
-/tests/components/evil_genius_labs/ @balloob
 /homeassistant/components/evohome/ @zxdavb
 /tests/components/evohome/ @zxdavb

@@ -515,8 +515,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/fortios/ @kimfrellsen
-/homeassistant/components/foscam/ @krmarien
-/tests/components/foscam/ @krmarien
+/homeassistant/components/foscam/ @Foscam-wangzhengyu
+/tests/components/foscam/ @Foscam-wangzhengyu
 /homeassistant/components/freebox/ @hacf-fr @Quentame
 /tests/components/freebox/ @hacf-fr @Quentame

@@ -650,6 +650,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/homeassistant_alerts/ @home-assistant/core
 /tests/components/homeassistant_alerts/ @home-assistant/core
+/homeassistant/components/homeassistant_connect_zbt2/ @home-assistant/core
+/tests/components/homeassistant_connect_zbt2/ @home-assistant/core
 /homeassistant/components/homeassistant_green/ @home-assistant/core
 /tests/components/homeassistant_green/ @home-assistant/core

@@ -678,8 +680,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/huawei_lte/ @scop @fphammerle
 /tests/components/huawei_lte/ @scop @fphammerle
-/homeassistant/components/hue/ @balloob @marcelveldt
-/tests/components/hue/ @balloob @marcelveldt
+/homeassistant/components/hue/ @marcelveldt
+/tests/components/hue/ @marcelveldt
 /homeassistant/components/huisbaasje/ @dennisschroer
 /tests/components/huisbaasje/ @dennisschroer

@@ -751,8 +753,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/intellifire/ @jeeftor
 /tests/components/intellifire/ @jeeftor
-/homeassistant/components/intent/ @home-assistant/core @synesthesiam
-/tests/components/intent/ @home-assistant/core @synesthesiam
+/homeassistant/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
+/tests/components/intent/ @home-assistant/core @synesthesiam @arturpragacz
 /homeassistant/components/intesishome/ @jnimmo

@@ -860,6 +862,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/lg_thinq/ @LG-ThinQ-Integration
 /tests/components/lg_thinq/ @LG-ThinQ-Integration
+/homeassistant/components/libre_hardware_monitor/ @Sab44
+/tests/components/libre_hardware_monitor/ @Sab44
 /homeassistant/components/lidarr/ @tkdrob
 /tests/components/lidarr/ @tkdrob

@@ -1108,8 +1112,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/open_router/ @joostlek
 /tests/components/open_router/ @joostlek
-/homeassistant/components/openai_conversation/ @balloob
-/tests/components/openai_conversation/ @balloob
 /homeassistant/components/openerz/ @misialq
 /tests/components/openerz/ @misialq

@@ -1189,6 +1191,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/poolsense/ @haemishkyd
 /tests/components/poolsense/ @haemishkyd
+/homeassistant/components/portainer/ @erwindouna
+/tests/components/portainer/ @erwindouna
 /homeassistant/components/powerfox/ @klaasnicolaas
 /tests/components/powerfox/ @klaasnicolaas

@@ -1208,8 +1212,6 @@ build.json @home-assistant/supervisor
 /homeassistant/components/proxmoxve/ @jhollowe @Corbeno
-/homeassistant/components/prusalink/ @balloob
-/tests/components/prusalink/ @balloob
 /homeassistant/components/ps4/ @ktnrg45
 /tests/components/ps4/ @ktnrg45

@@ -1303,8 +1305,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
 /tests/components/rfxtrx/ @danielhiversen @elupus @RobBie1221
-/homeassistant/components/rhasspy/ @balloob @synesthesiam
-/tests/components/rhasspy/ @balloob @synesthesiam
+/homeassistant/components/rhasspy/ @synesthesiam
+/tests/components/rhasspy/ @synesthesiam
 /homeassistant/components/ridwell/ @bachya
 /tests/components/ridwell/ @bachya

@@ -1392,12 +1394,14 @@ build.json @home-assistant/supervisor
 /homeassistant/components/sfr_box/ @epenet
 /tests/components/sfr_box/ @epenet
+/homeassistant/components/sftp_storage/ @maretodoric
+/tests/components/sftp_storage/ @maretodoric
 /homeassistant/components/sharkiq/ @JeffResc @funkybunch
 /tests/components/sharkiq/ @JeffResc @funkybunch
 /homeassistant/components/shell_command/ @home-assistant/core
 /tests/components/shell_command/ @home-assistant/core
-/homeassistant/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
-/tests/components/shelly/ @balloob @bieniu @thecode @chemelli74 @bdraco
+/homeassistant/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
+/tests/components/shelly/ @bieniu @thecode @chemelli74 @bdraco
 /homeassistant/components/shodan/ @fabaff

@@ -1544,8 +1548,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/tado/ @erwindouna
 /tests/components/tado/ @erwindouna
-/homeassistant/components/tag/ @balloob @dmulcahey
-/tests/components/tag/ @balloob @dmulcahey
+/homeassistant/components/tag/ @home-assistant/core
+/tests/components/tag/ @home-assistant/core
 /homeassistant/components/tailscale/ @frenck
 /tests/components/tailscale/ @frenck

@@ -1690,15 +1694,15 @@ build.json @home-assistant/supervisor
 /homeassistant/components/velbus/ @Cereal2nd @brefra
 /tests/components/velbus/ @Cereal2nd @brefra
-/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio
-/tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio
+/homeassistant/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
+/tests/components/velux/ @Julius2342 @DeerMaximum @pawlizio @wollew
 /homeassistant/components/venstar/ @garbled1 @jhollowe
 /tests/components/venstar/ @garbled1 @jhollowe
 /homeassistant/components/versasense/ @imstevenxyz
 /homeassistant/components/version/ @ludeeus
 /tests/components/version/ @ludeeus
-/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
-/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak
+/homeassistant/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
+/tests/components/vesync/ @markperdue @webdjoe @thegardenmonkey @cdnninja @iprak @sapuseven
 /homeassistant/components/vicare/ @CFenner
 /tests/components/vicare/ @CFenner

@@ -1710,8 +1714,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/vodafone_station/ @paoloantinori @chemelli74
 /tests/components/vodafone_station/ @paoloantinori @chemelli74
-/homeassistant/components/voip/ @balloob @synesthesiam @jaminh
-/tests/components/voip/ @balloob @synesthesiam @jaminh
+/homeassistant/components/voip/ @synesthesiam @jaminh
+/tests/components/voip/ @synesthesiam @jaminh
 /homeassistant/components/volumio/ @OnFreund
 /tests/components/volumio/ @OnFreund

@@ -1782,8 +1786,8 @@ build.json @home-assistant/supervisor
 /homeassistant/components/ws66i/ @ssaenger
 /tests/components/ws66i/ @ssaenger
-/homeassistant/components/wyoming/ @balloob @synesthesiam
-/tests/components/wyoming/ @balloob @synesthesiam
+/homeassistant/components/wyoming/ @synesthesiam
+/tests/components/wyoming/ @synesthesiam
 /homeassistant/components/xbox/ @hunterjm
 /tests/components/xbox/ @hunterjm
 /homeassistant/components/xiaomi_aqara/ @danielhiversen @syssi
@@ -3,8 +3,7 @@ FROM mcr.microsoft.com/vscode/devcontainers/base:debian
 SHELL ["/bin/bash", "-o", "pipefail", "-c"]

 RUN \
-    curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | apt-key add - \
-    && apt-get update \
+    apt-get update \
     && DEBIAN_FRONTEND=noninteractive apt-get install -y --no-install-recommends \
         # Additional library needed by some tests and accordingly by VScode Tests Discovery
         bluez \
build.yaml (10 changed lines)

@@ -1,10 +1,10 @@
 image: ghcr.io/home-assistant/{arch}-homeassistant
 build_from:
-  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.0
-  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.0
-  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.0
-  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.0
-  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.0
+  aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.09.1
+  armhf: ghcr.io/home-assistant/armhf-homeassistant-base:2025.09.1
+  armv7: ghcr.io/home-assistant/armv7-homeassistant-base:2025.09.1
+  amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.09.1
+  i386: ghcr.io/home-assistant/i386-homeassistant-base:2025.09.1
 codenotary:
   signer: notary@home-assistant.io
   base_image: notary@home-assistant.io
@@ -187,36 +187,42 @@ def main() -> int:

     from . import config, runner  # noqa: PLC0415

-    safe_mode = config.safe_mode_enabled(config_dir)
+    # Ensure only one instance runs per config directory
+    with runner.ensure_single_execution(config_dir) as single_execution_lock:
+        # Check if another instance is already running
+        if single_execution_lock.exit_code is not None:
+            return single_execution_lock.exit_code

-    runtime_conf = runner.RuntimeConfig(
-        config_dir=config_dir,
-        verbose=args.verbose,
-        log_rotate_days=args.log_rotate_days,
-        log_file=args.log_file,
-        log_no_color=args.log_no_color,
-        skip_pip=args.skip_pip,
-        skip_pip_packages=args.skip_pip_packages,
-        recovery_mode=args.recovery_mode,
-        debug=args.debug,
-        open_ui=args.open_ui,
-        safe_mode=safe_mode,
-    )
+        safe_mode = config.safe_mode_enabled(config_dir)

-    fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME)
-    with open(fault_file_name, mode="a", encoding="utf8") as fault_file:
-        faulthandler.enable(fault_file)
-        exit_code = runner.run(runtime_conf)
-        faulthandler.disable()
+        runtime_conf = runner.RuntimeConfig(
+            config_dir=config_dir,
+            verbose=args.verbose,
+            log_rotate_days=args.log_rotate_days,
+            log_file=args.log_file,
+            log_no_color=args.log_no_color,
+            skip_pip=args.skip_pip,
+            skip_pip_packages=args.skip_pip_packages,
+            recovery_mode=args.recovery_mode,
+            debug=args.debug,
+            open_ui=args.open_ui,
+            safe_mode=safe_mode,
+        )

-    # It's possible for the fault file to disappear, so suppress obvious errors
-    with suppress(FileNotFoundError):
-        if os.path.getsize(fault_file_name) == 0:
-            os.remove(fault_file_name)
+        fault_file_name = os.path.join(config_dir, FAULT_LOG_FILENAME)
+        with open(fault_file_name, mode="a", encoding="utf8") as fault_file:
+            faulthandler.enable(fault_file)
+            exit_code = runner.run(runtime_conf)
+            faulthandler.disable()

-    check_threads()
+        # It's possible for the fault file to disappear, so suppress obvious errors
+        with suppress(FileNotFoundError):
+            if os.path.getsize(fault_file_name) == 0:
+                os.remove(fault_file_name)

-    return exit_code
+        check_threads()

+        return exit_code


 if __name__ == "__main__":
@@ -27,7 +27,7 @@ from . import (
     SetupFlow,
 )

-REQUIREMENTS = ["pyotp==2.8.0"]
+REQUIREMENTS = ["pyotp==2.9.0"]

 CONF_MESSAGE = "message"
@@ -20,7 +20,7 @@ from . import (
     SetupFlow,
 )

-REQUIREMENTS = ["pyotp==2.8.0", "PyQRCode==1.2.1"]
+REQUIREMENTS = ["pyotp==2.9.0", "PyQRCode==1.2.1"]

 CONFIG_SCHEMA = MULTI_FACTOR_AUTH_MODULE_SCHEMA.extend({}, extra=vol.PREVENT_EXTRA)
@@ -1,5 +1,5 @@
 {
   "domain": "fritzbox",
-  "name": "FRITZ!Box",
+  "name": "FRITZ!",
   "integrations": ["fritz", "fritzbox", "fritzbox_callmonitor"]
 }
@@ -6,7 +6,6 @@
     "google_assistant_sdk",
     "google_cloud",
     "google_drive",
-    "google_gemini",
     "google_generative_ai_conversation",
     "google_mail",
     "google_maps",
@@ -50,6 +50,7 @@ class AccuWeatherFlowHandler(ConfigFlow, domain=DOMAIN):
                 await self.async_set_unique_id(
                     accuweather.location_key, raise_on_progress=False
                 )
+                self._abort_if_unique_id_configured()

                 return self.async_create_entry(
                     title=user_input[CONF_NAME], data=user_input
@@ -69,5 +69,5 @@ POLLEN_CATEGORY_MAP = {
     4: "very_high",
     5: "extreme",
 }
-UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=40)
+UPDATE_INTERVAL_OBSERVATION = timedelta(minutes=10)
 UPDATE_INTERVAL_DAILY_FORECAST = timedelta(hours=6)
@@ -7,6 +7,5 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.2.0"],
-  "single_config_entry": true
+  "requirements": ["accuweather==4.2.1"]
 }
@@ -17,6 +17,9 @@
         "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
         "invalid_api_key": "[%key:common::config_flow::error::invalid_api_key%]",
         "requests_exceeded": "The allowed number of requests to the AccuWeather API has been exceeded. You have to wait or change the API key."
       },
+      "abort": {
+        "already_configured": "[%key:common::config_flow::abort::already_configured_location%]"
+      }
     },
     "entity": {
@@ -126,7 +126,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         schema=vol.Schema(
             {
                 vol.Required(ATTR_TASK_NAME): cv.string,
-                vol.Required(ATTR_ENTITY_ID): cv.entity_id,
+                vol.Optional(ATTR_ENTITY_ID): cv.entity_id,
                 vol.Required(ATTR_INSTRUCTIONS): cv.string,
                 vol.Optional(ATTR_ATTACHMENTS): vol.All(
                     cv.ensure_list, [selector.MediaSelector({"accept": ["*/*"]})]

@@ -163,9 +163,10 @@ async def async_service_generate_image(call: ServiceCall) -> ServiceResponse:
 class AITaskPreferences:
     """AI Task preferences."""

-    KEYS = ("gen_data_entity_id",)
+    KEYS = ("gen_data_entity_id", "gen_image_entity_id")

     gen_data_entity_id: str | None = None
+    gen_image_entity_id: str | None = None

     def __init__(self, hass: HomeAssistant) -> None:
         """Initialize the preferences."""

@@ -179,17 +180,21 @@ class AITaskPreferences:
         if data is None:
             return
         for key in self.KEYS:
-            setattr(self, key, data[key])
+            setattr(self, key, data.get(key))

     @callback
     def async_set_preferences(
         self,
         *,
         gen_data_entity_id: str | None | UndefinedType = UNDEFINED,
+        gen_image_entity_id: str | None | UndefinedType = UNDEFINED,
     ) -> None:
         """Set the preferences."""
         changed = False
-        for key, value in (("gen_data_entity_id", gen_data_entity_id),):
+        for key, value in (
+            ("gen_data_entity_id", gen_data_entity_id),
+            ("gen_image_entity_id", gen_image_entity_id),
+        ):
             if value is not UNDEFINED:
                 if getattr(self, key) != value:
                     setattr(self, key, value)

@@ -211,7 +216,6 @@ class ImageView(HomeAssistantView):

     url = f"/api/{DOMAIN}/images/{{filename}}"
     name = f"api:{DOMAIN}/images"
-    requires_auth = False

     async def get(
         self,
@@ -60,6 +60,10 @@ class AITaskEntity(RestoreEntity):
         task: GenDataTask | GenImageTask,
     ) -> AsyncGenerator[ChatLog]:
         """Context manager used to manage the ChatLog used during an AI Task."""
+        user_llm_hass_api: llm.API | None = None
+        if isinstance(task, GenDataTask):
+            user_llm_hass_api = task.llm_api
+
         # pylint: disable-next=contextmanager-generator-missing-cleanup
         with (
             async_get_chat_log(

@@ -77,6 +81,7 @@ class AITaskEntity(RestoreEntity):
                 device_id=None,
             ),
             user_llm_prompt=DEFAULT_SYSTEM_PROMPT,
+            user_llm_hass_api=user_llm_hass_api,
         )

         chat_log.async_add_user_content(
@@ -37,6 +37,7 @@ def websocket_get_preferences(
     {
         vol.Required("type"): "ai_task/preferences/set",
         vol.Optional("gen_data_entity_id"): vol.Any(str, None),
+        vol.Optional("gen_image_entity_id"): vol.Any(str, None),
     }
 )
 @websocket_api.require_admin
@@ -2,8 +2,10 @@

 from __future__ import annotations

+from datetime import timedelta
 import logging

+from homeassistant.components.http.auth import async_sign_path
 from homeassistant.components.media_player import BrowseError, MediaClass
 from homeassistant.components.media_source import (
     BrowseMediaSource,

@@ -14,7 +16,7 @@ from homeassistant.components.media_source import (
 )
 from homeassistant.core import HomeAssistant

-from .const import DATA_IMAGES, DOMAIN
+from .const import DATA_IMAGES, DOMAIN, IMAGE_EXPIRY_TIME

 _LOGGER = logging.getLogger(__name__)

@@ -43,7 +45,14 @@ class ImageMediaSource(MediaSource):
         if image is None:
             raise Unresolvable(f"Could not resolve media item: {item.identifier}")

-        return PlayMedia(f"/api/{DOMAIN}/images/{item.identifier}", image.mime_type)
+        return PlayMedia(
+            async_sign_path(
+                self.hass,
+                f"/api/{DOMAIN}/images/{item.identifier}",
+                timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
+            ),
+            image.mime_type,
+        )

     async def async_browse_media(
         self,
@@ -3,7 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from datetime import datetime, timedelta
|
||||
from functools import partial
|
||||
import mimetypes
|
||||
from pathlib import Path
|
||||
@@ -13,8 +13,10 @@ from typing import Any
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import camera, conversation, media_source
|
||||
from homeassistant.components.http.auth import async_sign_path
|
||||
from homeassistant.core import HomeAssistant, ServiceResponse, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.chat_session import ChatSession, async_get_chat_session
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.network import get_url
|
||||
@@ -115,6 +117,7 @@ async def async_generate_data(
|
||||
instructions: str,
|
||||
structure: vol.Schema | None = None,
|
||||
attachments: list[dict] | None = None,
|
||||
llm_api: llm.API | None = None,
|
||||
) -> GenDataTaskResult:
|
||||
"""Run a data generation task in the AI Task integration."""
|
||||
if entity_id is None:
|
||||
@@ -150,6 +153,7 @@ async def async_generate_data(
|
||||
instructions=instructions,
|
||||
structure=structure,
|
||||
attachments=resolved_attachments or None,
|
||||
llm_api=llm_api,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -176,11 +180,17 @@ async def async_generate_image(
|
||||
hass: HomeAssistant,
|
||||
*,
|
||||
task_name: str,
|
||||
entity_id: str,
|
||||
entity_id: str | None = None,
|
||||
instructions: str,
|
||||
attachments: list[dict] | None = None,
|
||||
) -> ServiceResponse:
|
||||
"""Run an image generation task in the AI Task integration."""
|
||||
if entity_id is None:
|
||||
entity_id = hass.data[DATA_PREFERENCES].gen_image_entity_id
|
||||
|
||||
if entity_id is None:
|
||||
raise HomeAssistantError("No entity_id provided and no preferred entity set")
|
||||
|
||||
entity = hass.data[DATA_COMPONENT].get_entity(entity_id)
|
||||
if entity is None:
|
||||
raise HomeAssistantError(f"AI Task entity {entity_id} not found")
|
||||
@@ -239,7 +249,11 @@ async def async_generate_image(
if IMAGE_EXPIRY_TIME > 0:
async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))

service_result["url"] = get_url(hass) + f"/api/{DOMAIN}/images/{filename}"
service_result["url"] = get_url(hass) + async_sign_path(
hass,
f"/api/{DOMAIN}/images/{filename}",
timedelta(seconds=IMAGE_EXPIRY_TIME or 1800),
)
service_result["media_source_id"] = f"media-source://{DOMAIN}/images/{filename}"

return service_result
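Both the signed URL and the scheduled cleanup above key off the same constant: the link is signed for IMAGE_EXPIRY_TIME seconds (falling back to 1800 when the constant is falsy) and the stored image is purged after that interval via async_call_later. A rough sketch of that pairing; the constant value and the empty purge callback are illustrative simplifications:

    from datetime import datetime, timedelta
    from functools import partial

    from homeassistant.core import HomeAssistant, callback
    from homeassistant.helpers.event import async_call_later

    IMAGE_EXPIRY_TIME = 600  # seconds; illustrative value


    @callback
    def _purge_image(filename: str, _now: datetime) -> None:
        """Forget the generated image (real code drops it from hass.data)."""


    def schedule_image_cleanup(hass: HomeAssistant, filename: str) -> timedelta:
        """Schedule deletion of the image and return the lifetime used for signing."""
        if IMAGE_EXPIRY_TIME > 0:
            async_call_later(hass, IMAGE_EXPIRY_TIME, partial(_purge_image, filename))
        return timedelta(seconds=IMAGE_EXPIRY_TIME or 1800)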
@@ -261,6 +275,9 @@ class GenDataTask:
|
||||
attachments: list[conversation.Attachment] | None = None
|
||||
"""List of attachments to go along the instructions."""
|
||||
|
||||
llm_api: llm.API | None = None
|
||||
"""API to provide to the LLM."""
|
||||
|
||||
def __str__(self) -> str:
|
||||
"""Return task as a string."""
|
||||
return f"<GenDataTask {self.name}: {id(self)}>"
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from airos.airos8 import AirOS
|
||||
from airos.airos8 import AirOS8
|
||||
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -23,7 +23,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AirOSConfigEntry) -> boo
|
||||
# with no option in the web UI to change or upload a custom certificate.
|
||||
session = async_get_clientsession(hass, verify_ssl=False)
|
||||
|
||||
airos_device = AirOS(
|
||||
airos_device = AirOS8(
|
||||
host=entry.data[CONF_HOST],
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
|
||||
@@ -15,7 +15,7 @@ from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import AirOSConfigEntry, AirOSData, AirOSDataUpdateCoordinator
|
||||
from .coordinator import AirOS8Data, AirOSConfigEntry, AirOSDataUpdateCoordinator
|
||||
from .entity import AirOSEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -27,7 +27,7 @@ PARALLEL_UPDATES = 0
|
||||
class AirOSBinarySensorEntityDescription(BinarySensorEntityDescription):
|
||||
"""Describe an AirOS binary sensor."""
|
||||
|
||||
value_fn: Callable[[AirOSData], bool]
|
||||
value_fn: Callable[[AirOS8Data], bool]
|
||||
|
||||
|
||||
BINARY_SENSORS: tuple[AirOSBinarySensorEntityDescription, ...] = (
|
||||
|
||||
@@ -19,7 +19,7 @@ from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AirOS
|
||||
from .coordinator import AirOS8
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -48,7 +48,7 @@ class AirOSConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
# with no option in the web UI to change or upload a custom certificate.
|
||||
session = async_get_clientsession(self.hass, verify_ssl=False)
|
||||
|
||||
airos_device = AirOS(
|
||||
airos_device = AirOS8(
|
||||
host=user_input[CONF_HOST],
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
|
||||
@@ -4,7 +4,7 @@ from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from airos.airos8 import AirOS, AirOSData
|
||||
from airos.airos8 import AirOS8, AirOS8Data
|
||||
from airos.exceptions import (
|
||||
AirOSConnectionAuthenticationError,
|
||||
AirOSConnectionSetupError,
|
||||
@@ -24,13 +24,13 @@ _LOGGER = logging.getLogger(__name__)
|
||||
type AirOSConfigEntry = ConfigEntry[AirOSDataUpdateCoordinator]
|
||||
|
||||
|
||||
class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSData]):
|
||||
class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOS8Data]):
|
||||
"""Class to manage fetching AirOS data from single endpoint."""
|
||||
|
||||
config_entry: AirOSConfigEntry
|
||||
|
||||
def __init__(
|
||||
self, hass: HomeAssistant, config_entry: AirOSConfigEntry, airos_device: AirOS
|
||||
self, hass: HomeAssistant, config_entry: AirOSConfigEntry, airos_device: AirOS8
|
||||
) -> None:
|
||||
"""Initialize the coordinator."""
|
||||
self.airos_device = airos_device
|
||||
@@ -42,7 +42,7 @@ class AirOSDataUpdateCoordinator(DataUpdateCoordinator[AirOSData]):
|
||||
update_interval=SCAN_INTERVAL,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> AirOSData:
|
||||
async def _async_update_data(self) -> AirOS8Data:
|
||||
"""Fetch data from AirOS."""
|
||||
try:
|
||||
await self.airos_device.login()
|
||||
|
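The AirOS8Data rename flows through the DataUpdateCoordinator generic parameter, which is what keeps _async_update_data and coordinator.data typed consistently. A generic illustration of that pattern; the names here are placeholders rather than the airos API:

    from dataclasses import dataclass
    from datetime import timedelta
    import logging

    from homeassistant.core import HomeAssistant
    from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

    _LOGGER = logging.getLogger(__name__)


    @dataclass
    class ExampleData:
        """Stand-in for the library's typed data object."""

        uptime: int


    class ExampleCoordinator(DataUpdateCoordinator[ExampleData]):
        """Coordinator whose .data attribute is typed as ExampleData."""

        def __init__(self, hass: HomeAssistant) -> None:
            super().__init__(
                hass,
                _LOGGER,
                config_entry=None,
                name="example",
                update_interval=timedelta(seconds=60),
            )

        async def _async_update_data(self) -> ExampleData:
            # A real implementation would poll the device here.
            return ExampleData(uptime=0)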
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airos",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["airos==0.4.4"]
|
||||
"requirements": ["airos==0.5.1"]
|
||||
}
|
||||
|
||||
@@ -26,7 +26,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.typing import StateType
|
||||
|
||||
from .coordinator import AirOSConfigEntry, AirOSData, AirOSDataUpdateCoordinator
|
||||
from .coordinator import AirOS8Data, AirOSConfigEntry, AirOSDataUpdateCoordinator
|
||||
from .entity import AirOSEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -42,7 +42,7 @@ PARALLEL_UPDATES = 0
|
||||
class AirOSSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describe an AirOS sensor."""
|
||||
|
||||
value_fn: Callable[[AirOSData], StateType]
|
||||
value_fn: Callable[[AirOS8Data], StateType]
|
||||
|
||||
|
||||
SENSORS: tuple[AirOSSensorEntityDescription, ...] = (
|
||||
|
||||
@@ -11,5 +11,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/airzone",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioairzone"],
|
||||
"requirements": ["aioairzone==1.0.0"]
|
||||
"requirements": ["aioairzone==1.0.1"]
|
||||
}
|
||||
|
||||
@@ -4,6 +4,11 @@
|
||||
"codeowners": ["@swcloudgenie"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["application_credentials"],
|
||||
"dhcp": [
|
||||
{
|
||||
"hostname": "gdocntl-*"
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/aladdin_connect",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
|
||||
@@ -7,6 +7,9 @@
|
||||
"reauth_confirm": {
|
||||
"title": "[%key:common::config_flow::title::reauth%]",
|
||||
"description": "Aladdin Connect needs to re-authenticate your account"
|
||||
},
|
||||
"oauth_discovery": {
|
||||
"description": "Home Assistant has found an Aladdin Connect device on your network. Press **Submit** to continue setting up Aladdin Connect."
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
|
||||
@@ -61,7 +61,7 @@ ALARM_SERVICE_SCHEMA: Final = make_entity_service_schema(
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Track states and offer events for sensors."""
|
||||
"""Set up the alarm control panel component."""
|
||||
component = hass.data[DATA_COMPONENT] = EntityComponent[AlarmControlPanelEntity](
|
||||
_LOGGER, DOMAIN, hass, SCAN_INTERVAL
|
||||
)
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
"""Support for repeating alerts when conditions are met."""
|
||||
"""Support for repeating alerts when conditions are met.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -63,7 +66,10 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Alert component."""
|
||||
"""Set up the Alert component.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
component = EntityComponent[AlertEntity](LOGGER, DOMAIN, hass)
|
||||
|
||||
entities: list[AlertEntity] = []
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
"""Support for repeating alerts when conditions are met."""
|
||||
"""Support for repeating alerts when conditions are met.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -27,7 +30,10 @@ from .const import DOMAIN, LOGGER
|
||||
|
||||
|
||||
class AlertEntity(Entity):
|
||||
"""Representation of an alert."""
|
||||
"""Representation of an alert.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
|
||||
_attr_should_poll = False
|
||||
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
"""Reproduce an Alert state."""
|
||||
"""Reproduce an Alert state.
|
||||
|
||||
DEVELOPMENT OF THE ALERT INTEGRATION IS FROZEN.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
@@ -5,7 +5,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import aiohttp_client, config_validation as cv
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA, COUNTRY_DOMAINS, DOMAIN
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA, CONF_SITE, COUNTRY_DOMAINS, DOMAIN
|
||||
from .coordinator import AmazonConfigEntry, AmazonDevicesCoordinator
|
||||
from .services import async_setup_services
|
||||
|
||||
@@ -42,7 +42,23 @@ async def async_setup_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bo
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: AmazonConfigEntry) -> bool:
|
||||
"""Migrate old entry."""
|
||||
if entry.version == 1 and entry.minor_version == 1:
|
||||
|
||||
if entry.version == 1 and entry.minor_version < 3:
|
||||
if CONF_SITE in entry.data:
|
||||
# Site in data (wrong place), just move to login data
|
||||
new_data = entry.data.copy()
|
||||
new_data[CONF_LOGIN_DATA][CONF_SITE] = new_data[CONF_SITE]
|
||||
new_data.pop(CONF_SITE)
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, data=new_data, version=1, minor_version=3
|
||||
)
|
||||
return True
|
||||
|
||||
if CONF_SITE in entry.data[CONF_LOGIN_DATA]:
|
||||
# Site is there, just update version to avoid future migrations
|
||||
hass.config_entries.async_update_entry(entry, version=1, minor_version=3)
|
||||
return True
|
||||
|
||||
_LOGGER.debug(
|
||||
"Migrating from version %s.%s", entry.version, entry.minor_version
|
||||
)
|
||||
@@ -53,10 +69,10 @@ async def async_migrate_entry(hass: HomeAssistant, entry: AmazonConfigEntry) ->
|
||||
|
||||
# Add site to login data
|
||||
new_data = entry.data.copy()
|
||||
new_data[CONF_LOGIN_DATA]["site"] = f"https://www.amazon.{domain}"
|
||||
new_data[CONF_LOGIN_DATA][CONF_SITE] = f"https://www.amazon.{domain}"
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, data=new_data, version=1, minor_version=2
|
||||
entry, data=new_data, version=1, minor_version=3
|
||||
)
|
||||
|
||||
_LOGGER.info(
|
||||
|
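Config entry migrations like the one above follow a small contract: Home Assistant calls async_migrate_entry when a stored entry's version/minor_version is older than the flow's declared VERSION and MINOR_VERSION, the function reshapes the data and bumps the entry with async_update_entry, and returning True marks the migration as successful. A bare-bones skeleton of that contract, not specific to Alexa Devices:

    from homeassistant.config_entries import ConfigEntry
    from homeassistant.core import HomeAssistant


    async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
        """Migrate an old config entry to the current schema."""
        if entry.version > 1:
            # Downgrades from a future major version are not supported.
            return False

        if entry.version == 1 and entry.minor_version < 3:
            new_data = {**entry.data}
            # ...reshape new_data for the new schema here...
            hass.config_entries.async_update_entry(
                entry, data=new_data, version=1, minor_version=3
            )

        return True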
||||
@@ -52,7 +52,7 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Alexa Devices."""
|
||||
|
||||
VERSION = 1
|
||||
MINOR_VERSION = 2
|
||||
MINOR_VERSION = 3
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -107,7 +107,9 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
if user_input is not None:
|
||||
try:
|
||||
await validate_input(self.hass, {**reauth_entry.data, **user_input})
|
||||
data = await validate_input(
|
||||
self.hass, {**reauth_entry.data, **user_input}
|
||||
)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except (CannotAuthenticate, TypeError):
|
||||
@@ -119,8 +121,9 @@ class AmazonDevicesConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
reauth_entry,
|
||||
data={
|
||||
CONF_USERNAME: entry_data[CONF_USERNAME],
|
||||
CONF_PASSWORD: entry_data[CONF_PASSWORD],
|
||||
CONF_PASSWORD: user_input[CONF_PASSWORD],
|
||||
CONF_CODE: user_input[CONF_CODE],
|
||||
CONF_LOGIN_DATA: data,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -6,6 +6,7 @@ _LOGGER = logging.getLogger(__package__)
|
||||
|
||||
DOMAIN = "alexa_devices"
|
||||
CONF_LOGIN_DATA = "login_data"
|
||||
CONF_SITE = "site"
|
||||
|
||||
DEFAULT_DOMAIN = "com"
|
||||
COUNTRY_DOMAINS = {
|
||||
|
||||
@@ -14,6 +14,7 @@ from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
|
||||
@@ -48,12 +49,13 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
entry.data[CONF_PASSWORD],
|
||||
entry.data[CONF_LOGIN_DATA],
|
||||
)
|
||||
self.previous_devices: set[str] = set()
|
||||
|
||||
async def _async_update_data(self) -> dict[str, AmazonDevice]:
|
||||
"""Update device data."""
|
||||
try:
|
||||
await self.api.login_mode_stored_data()
|
||||
return await self.api.get_devices_data()
|
||||
data = await self.api.get_devices_data()
|
||||
except CannotConnect as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
@@ -72,3 +74,31 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
translation_key="invalid_auth",
|
||||
translation_placeholders={"error": repr(err)},
|
||||
) from err
|
||||
else:
|
||||
current_devices = set(data.keys())
|
||||
if stale_devices := self.previous_devices - current_devices:
|
||||
await self._async_remove_device_stale(stale_devices)
|
||||
|
||||
self.previous_devices = current_devices
|
||||
return data
|
||||
|
||||
async def _async_remove_device_stale(
|
||||
self,
|
||||
stale_devices: set[str],
|
||||
) -> None:
|
||||
"""Remove stale device."""
|
||||
device_registry = dr.async_get(self.hass)
|
||||
|
||||
for serial_num in stale_devices:
|
||||
_LOGGER.debug(
|
||||
"Detected change in devices: serial %s removed",
|
||||
serial_num,
|
||||
)
|
||||
device = device_registry.async_get_device(
|
||||
identifiers={(DOMAIN, serial_num)}
|
||||
)
|
||||
if device:
|
||||
device_registry.async_update_device(
|
||||
device_id=device.id,
|
||||
remove_config_entry_id=self.config_entry.entry_id,
|
||||
)
|
||||
|
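The coordinator above remembers the serial numbers from the previous refresh and treats anything missing from the current refresh as stale, then detaches those devices from the config entry via the device registry. The detection itself is a plain set difference, sketched in isolation:

    def find_stale(previous: set[str], current: set[str]) -> set[str]:
        """Return serials seen on the last refresh that are gone now."""
        return previous - current


    # Example: device "B2" disappeared between two refreshes.
    assert find_stale({"A1", "B2"}, {"A1", "C3"}) == {"B2"}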
||||
@@ -64,9 +64,7 @@ rules:
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: no known use cases for repair issues or flows, yet
|
||||
stale-devices:
|
||||
status: todo
|
||||
comment: automate the cleanup process
|
||||
stale-devices: done
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
|
||||
@@ -104,10 +104,6 @@
|
||||
"sound": {
|
||||
"name": "Alexa Skill sound file",
|
||||
"description": "The sound file to play."
|
||||
},
|
||||
"sound_variant": {
|
||||
"name": "Sound variant",
|
||||
"description": "The variant of the sound to play."
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
@@ -24,7 +24,12 @@ from homeassistant.components.recorder import (
|
||||
get_instance as get_recorder_instance,
|
||||
)
|
||||
from homeassistant.config_entries import SOURCE_IGNORE
|
||||
from homeassistant.const import ATTR_DOMAIN, BASE_PLATFORMS, __version__ as HA_VERSION
|
||||
from homeassistant.const import (
|
||||
ATTR_ASSUMED_STATE,
|
||||
ATTR_DOMAIN,
|
||||
BASE_PLATFORMS,
|
||||
__version__ as HA_VERSION,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr, entity_registry as er
|
||||
@@ -389,66 +394,117 @@ def _domains_from_yaml_config(yaml_configuration: dict[str, Any]) -> set[str]:
|
||||
|
||||
|
||||
async def async_devices_payload(hass: HomeAssistant) -> dict:
|
||||
"""Return the devices payload."""
|
||||
devices: list[dict[str, Any]] = []
|
||||
"""Return detailed information about entities and devices."""
|
||||
integrations_info: dict[str, dict[str, Any]] = {}
|
||||
|
||||
dev_reg = dr.async_get(hass)
|
||||
# Devices that need via device info set
|
||||
new_indexes: dict[str, int] = {}
|
||||
via_devices: dict[str, str] = {}
|
||||
|
||||
seen_integrations = set()
|
||||
# We need to refer to other devices, for example in `via_device` field.
|
||||
# We don't however send the original device ids outside of Home Assistant,
|
||||
# instead we refer to devices by (integration_domain, index_in_integration_device_list).
|
||||
device_id_mapping: dict[str, tuple[str, int]] = {}
|
||||
|
||||
for device in dev_reg.devices.values():
|
||||
if not device.primary_config_entry:
|
||||
for device_entry in dev_reg.devices.values():
|
||||
if not device_entry.primary_config_entry:
|
||||
continue
|
||||
|
||||
config_entry = hass.config_entries.async_get_entry(device.primary_config_entry)
|
||||
config_entry = hass.config_entries.async_get_entry(
|
||||
device_entry.primary_config_entry
|
||||
)
|
||||
|
||||
if config_entry is None:
|
||||
continue
|
||||
|
||||
seen_integrations.add(config_entry.domain)
|
||||
integration_domain = config_entry.domain
|
||||
integration_info = integrations_info.setdefault(
|
||||
integration_domain, {"devices": [], "entities": []}
|
||||
)
|
||||
|
||||
new_indexes[device.id] = len(devices)
|
||||
devices.append(
|
||||
devices_info = integration_info["devices"]
|
||||
|
||||
device_id_mapping[device_entry.id] = (integration_domain, len(devices_info))
|
||||
|
||||
devices_info.append(
|
||||
{
|
||||
"integration": config_entry.domain,
|
||||
"manufacturer": device.manufacturer,
|
||||
"model_id": device.model_id,
|
||||
"model": device.model,
|
||||
"sw_version": device.sw_version,
|
||||
"hw_version": device.hw_version,
|
||||
"has_configuration_url": device.configuration_url is not None,
|
||||
"via_device": None,
|
||||
"entry_type": device.entry_type.value if device.entry_type else None,
|
||||
"entities": [],
|
||||
"entry_type": device_entry.entry_type,
|
||||
"has_configuration_url": device_entry.configuration_url is not None,
|
||||
"hw_version": device_entry.hw_version,
|
||||
"manufacturer": device_entry.manufacturer,
|
||||
"model": device_entry.model,
|
||||
"model_id": device_entry.model_id,
|
||||
"sw_version": device_entry.sw_version,
|
||||
"via_device": device_entry.via_device_id,
|
||||
}
|
||||
)
|
||||
|
||||
if device.via_device_id:
|
||||
via_devices[device.id] = device.via_device_id
|
||||
# Fill out via_device with new device ids
|
||||
for integration_info in integrations_info.values():
|
||||
for device_info in integration_info["devices"]:
|
||||
if device_info["via_device"] is None:
|
||||
continue
|
||||
device_info["via_device"] = device_id_mapping.get(device_info["via_device"])
|
||||
|
||||
for from_device, via_device in via_devices.items():
|
||||
if via_device not in new_indexes:
|
||||
continue
|
||||
devices[new_indexes[from_device]]["via_device"] = new_indexes[via_device]
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
for entity_entry in ent_reg.entities.values():
|
||||
integration_domain = entity_entry.platform
|
||||
integration_info = integrations_info.setdefault(
|
||||
integration_domain, {"devices": [], "entities": []}
|
||||
)
|
||||
|
||||
devices_info = integration_info["devices"]
|
||||
entities_info = integration_info["entities"]
|
||||
|
||||
entity_state = hass.states.get(entity_entry.entity_id)
|
||||
|
||||
entity_info = {
|
||||
# LIMITATION: `assumed_state` can be overridden by users;
|
||||
# we should replace it with the original value in the future.
|
||||
# It is also not present, if entity is not in the state machine,
|
||||
# which can happen for disabled entities.
|
||||
"assumed_state": entity_state.attributes.get(ATTR_ASSUMED_STATE, False)
|
||||
if entity_state is not None
|
||||
else None,
|
||||
"capabilities": entity_entry.capabilities,
|
||||
"domain": entity_entry.domain,
|
||||
"entity_category": entity_entry.entity_category,
|
||||
"has_entity_name": entity_entry.has_entity_name,
|
||||
"original_device_class": entity_entry.original_device_class,
|
||||
# LIMITATION: `unit_of_measurement` can be overridden by users;
|
||||
# we should replace it with the original value in the future.
|
||||
"unit_of_measurement": entity_entry.unit_of_measurement,
|
||||
}
|
||||
|
||||
if (
|
||||
((device_id := entity_entry.device_id) is not None)
|
||||
and ((new_device_id := device_id_mapping.get(device_id)) is not None)
|
||||
and (new_device_id[0] == integration_domain)
|
||||
):
|
||||
device_info = devices_info[new_device_id[1]]
|
||||
device_info["entities"].append(entity_info)
|
||||
else:
|
||||
entities_info.append(entity_info)
|
||||
|
||||
integrations = {
|
||||
domain: integration
|
||||
for domain, integration in (
|
||||
await async_get_integrations(hass, seen_integrations)
|
||||
await async_get_integrations(hass, integrations_info.keys())
|
||||
).items()
|
||||
if isinstance(integration, Integration)
|
||||
}
|
||||
|
||||
for device_info in devices:
|
||||
if integration := integrations.get(device_info["integration"]):
|
||||
device_info["is_custom_integration"] = not integration.is_built_in
|
||||
for domain, integration_info in integrations_info.items():
|
||||
if integration := integrations.get(domain):
|
||||
integration_info["is_custom_integration"] = not integration.is_built_in
|
||||
# Include version for custom integrations
|
||||
if not integration.is_built_in and integration.version:
|
||||
device_info["custom_integration_version"] = str(integration.version)
|
||||
integration_info["custom_integration_version"] = str(
|
||||
integration.version
|
||||
)
|
||||
|
||||
return {
|
||||
"version": "home-assistant:1",
|
||||
"home_assistant": HA_VERSION,
|
||||
"devices": devices,
|
||||
"integrations": integrations_info,
|
||||
}
|
||||
|
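The reworked analytics payload groups devices and entities per integration and refers to related devices by an (integration_domain, index) pair instead of exposing registry IDs. The resulting structure looks roughly like this; the values are illustrative and each device dict carries more keys than shown:

    EXAMPLE_PAYLOAD = {
        "version": "home-assistant:1",
        "home_assistant": "2025.9.0",
        "integrations": {
            "hue": {
                "devices": [
                    # Index 0: the bridge, not reached via another device.
                    {"model": "Bridge", "via_device": None, "entities": []},
                    # Index 1: a lamp reached through the bridge at ("hue", 0).
                    {"model": "Lamp", "via_device": ("hue", 0), "entities": []},
                ],
                # Entities not attached to any device of this integration.
                "entities": [],
                "is_custom_integration": False,
            }
        },
    }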
||||
@@ -37,7 +37,7 @@ from .helpers import AndroidTVRemoteConfigEntry, create_api, get_enable_ime
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
APPS_NEW_ID = "NewApp"
|
||||
APPS_NEW_ID = "add_new"
|
||||
CONF_APP_DELETE = "app_delete"
|
||||
CONF_APP_ID = "app_id"
|
||||
|
||||
@@ -66,9 +66,14 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is not None:
|
||||
self.host = user_input[CONF_HOST]
|
||||
api = create_api(self.hass, self.host, enable_ime=False)
|
||||
await api.async_generate_cert_if_missing()
|
||||
try:
|
||||
await api.async_generate_cert_if_missing()
|
||||
self.name, self.mac = await api.async_get_name_and_mac()
|
||||
except CannotConnect:
|
||||
# Likely invalid IP address or device is network unreachable. Stay
|
||||
# in the user step allowing the user to enter a different host.
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
await self.async_set_unique_id(format_mac(self.mac))
|
||||
if self.source == SOURCE_RECONFIGURE:
|
||||
self._abort_if_unique_id_mismatch()
|
||||
@@ -81,11 +86,10 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
},
|
||||
)
|
||||
self._abort_if_unique_id_configured(updates={CONF_HOST: self.host})
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
# Likely invalid IP address or device is network unreachable. Stay
|
||||
# in the user step allowing the user to enter a different host.
|
||||
errors["base"] = "cannot_connect"
|
||||
try:
|
||||
return await self._async_start_pair()
|
||||
except (CannotConnect, ConnectionClosed):
|
||||
errors["base"] = "cannot_connect"
|
||||
else:
|
||||
user_input = {}
|
||||
default_host = user_input.get(CONF_HOST, vol.UNDEFINED)
|
||||
@@ -112,22 +116,9 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the pair step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
pin = user_input["pin"]
|
||||
try:
|
||||
pin = user_input["pin"]
|
||||
await self.api.async_finish_pairing(pin)
|
||||
if self.source == SOURCE_REAUTH:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), reload_even_if_entry_is_unchanged=True
|
||||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self.name,
|
||||
data={
|
||||
CONF_HOST: self.host,
|
||||
CONF_NAME: self.name,
|
||||
CONF_MAC: self.mac,
|
||||
},
|
||||
)
|
||||
except InvalidAuth:
|
||||
# Invalid PIN. Stay in the pair step allowing the user to enter
|
||||
# a different PIN.
|
||||
@@ -145,6 +136,20 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
# them to enter a new IP address but we cannot do that for the zeroconf
|
||||
# flow. Simpler to abort for both flows.
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
else:
|
||||
if self.source == SOURCE_REAUTH:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(), reload_even_if_entry_is_unchanged=True
|
||||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=self.name,
|
||||
data={
|
||||
CONF_HOST: self.host,
|
||||
CONF_NAME: self.name,
|
||||
CONF_MAC: self.mac,
|
||||
},
|
||||
)
|
||||
return self.async_show_form(
|
||||
step_id="pair",
|
||||
data_schema=STEP_PAIR_DATA_SCHEMA,
|
||||
@@ -282,7 +287,9 @@ class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithReload):
|
||||
{
|
||||
vol.Optional(CONF_APPS): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=apps, mode=SelectSelectorMode.DROPDOWN
|
||||
options=apps,
|
||||
mode=SelectSelectorMode.DROPDOWN,
|
||||
translation_key="apps",
|
||||
)
|
||||
),
|
||||
vol.Required(
|
||||
|
||||
@@ -6,7 +6,7 @@ from typing import Any
|
||||
|
||||
from androidtvremote2 import AndroidTVRemote, ConnectionClosed
|
||||
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
|
||||
from homeassistant.const import CONF_MAC, CONF_NAME
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
@@ -28,8 +28,6 @@ class AndroidTVRemoteBaseEntity(Entity):
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
self._api = api
|
||||
self._host = config_entry.data[CONF_HOST]
|
||||
self._name = config_entry.data[CONF_NAME]
|
||||
self._apps: dict[str, Any] = config_entry.options.get(CONF_APPS, {})
|
||||
self._attr_unique_id = config_entry.unique_id
|
||||
self._attr_is_on = api.is_on
|
||||
@@ -39,7 +37,7 @@ class AndroidTVRemoteBaseEntity(Entity):
|
||||
self._attr_device_info = DeviceInfo(
|
||||
connections={(CONNECTION_NETWORK_MAC, config_entry.data[CONF_MAC])},
|
||||
identifiers={(DOMAIN, config_entry.unique_id)},
|
||||
name=self._name,
|
||||
name=config_entry.data[CONF_NAME],
|
||||
manufacturer=device_info["manufacturer"],
|
||||
model=device_info["model"],
|
||||
)
|
||||
|
||||
@@ -7,6 +7,7 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["androidtvremote2"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["androidtvremote2==0.2.3"],
|
||||
"zeroconf": ["_androidtvremote2._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -175,7 +175,11 @@ class AndroidTVRemoteMediaPlayerEntity(AndroidTVRemoteBaseEntity, MediaPlayerEnt
|
||||
"""Play a piece of media."""
|
||||
if media_type == MediaType.CHANNEL:
|
||||
if not media_id.isnumeric():
|
||||
raise ValueError(f"Channel must be numeric: {media_id}")
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_channel",
|
||||
translation_placeholders={"media_id": media_id},
|
||||
)
|
||||
if self._channel_set_task:
|
||||
self._channel_set_task.cancel()
|
||||
self._channel_set_task = asyncio.create_task(
|
||||
@@ -188,7 +192,11 @@ class AndroidTVRemoteMediaPlayerEntity(AndroidTVRemoteBaseEntity, MediaPlayerEnt
|
||||
self._send_launch_app_command(media_id)
|
||||
return
|
||||
|
||||
raise ValueError(f"Invalid media type: {media_type}")
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="invalid_media_type",
|
||||
translation_placeholders={"media_type": media_type},
|
||||
)
|
||||
|
||||
async def async_browse_media(
|
||||
self,
|
||||
|
||||
78
homeassistant/components/androidtv_remote/quality_scale.yaml
Normal file
@@ -0,0 +1,78 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: No integration-specific service actions are defined.
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: This is a push-based integration.
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: done
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: The integration is configured on a per-device basis, so there are no dynamic devices to add.
|
||||
entity-category:
|
||||
status: exempt
|
||||
comment: All entities are primary and do not require a specific category.
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: The integration provides only primary entities that should be enabled.
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: Icons are provided by the entity's device class, and no state-based icons are needed.
|
||||
reconfiguration-flow: done
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: The integration uses the reauth flow for authentication issues, and no other repairable issues have been identified.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: The integration manages a single device per config entry. Stale device removal is handled by removing the config entry.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession:
|
||||
status: exempt
|
||||
comment: The underlying library does not use HTTP for communication.
|
||||
strict-typing: done
|
||||
@@ -22,7 +22,7 @@
|
||||
},
|
||||
"zeroconf_confirm": {
|
||||
"title": "Discovered Android TV",
|
||||
"description": "Do you want to add the Android TV ({name}) to Home Assistant? It will turn on and a pairing code will be displayed on it that you will need to enter in the next screen."
|
||||
"description": "Do you want to add the Android TV ({name}) to Home Assistant? It will turn on and a pairing code will be displayed on it that you will need to enter in the next screen."
|
||||
},
|
||||
"pair": {
|
||||
"description": "Enter the pairing code displayed on the Android TV ({name}).",
|
||||
@@ -85,6 +85,19 @@
|
||||
"exceptions": {
|
||||
"connection_closed": {
|
||||
"message": "Connection to the Android TV device is closed"
|
||||
},
|
||||
"invalid_channel": {
|
||||
"message": "Channel must be numeric: {media_id}"
|
||||
},
|
||||
"invalid_media_type": {
|
||||
"message": "Invalid media type: {media_type}"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"apps": {
|
||||
"options": {
|
||||
"add_new": "Add new"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
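These exception entries pair with the media player change earlier in the diff: raising HomeAssistantError with translation_domain and translation_key makes Home Assistant resolve the message from the integration's exceptions section and substitute the placeholders. A minimal sketch of the pattern; validate_channel is a hypothetical helper name:

    from homeassistant.exceptions import HomeAssistantError

    DOMAIN = "androidtv_remote"


    def validate_channel(media_id: str) -> None:
        """Reject non-numeric channel ids with a translatable error."""
        if not media_id.isnumeric():
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="invalid_channel",
                translation_placeholders={"media_id": media_id},
            )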
||||
@@ -5,5 +5,5 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/aosmith",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["py-aosmith==1.0.12"]
|
||||
"requirements": ["py-aosmith==1.0.14"]
|
||||
}
|
||||
|
||||
@@ -9,7 +9,7 @@ from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import APCUPSdConfigEntry, APCUPSdCoordinator
|
||||
|
||||
PLATFORMS: Final = (Platform.BINARY_SENSOR, Platform.SENSOR)
|
||||
PLATFORMS: Final = [Platform.BINARY_SENSOR, Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
|
||||
@@ -100,6 +100,7 @@ class APCUPSdCoordinator(DataUpdateCoordinator[APCUPSdData]):
|
||||
name=self.data.name or "APC UPS",
|
||||
hw_version=self.data.get("FIRMWARE"),
|
||||
sw_version=self.data.get("VERSION"),
|
||||
serial_number=self.data.serial_no,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> APCUPSdData:
|
||||
|
||||
@@ -6,6 +6,6 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/apcupsd",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["apcaccess"],
|
||||
"quality_scale": "bronze",
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioapcaccess==0.4.2"]
|
||||
}
|
||||
|
||||
@@ -43,10 +43,7 @@ rules:
|
||||
status: exempt
|
||||
comment: |
|
||||
The integration does not require authentication.
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
Patch `aioapcaccess.request_status` where we use it.
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: done
|
||||
|
||||
@@ -103,6 +103,7 @@ async def async_pipeline_from_audio_stream(
|
||||
wake_word_settings: WakeWordSettings | None = None,
|
||||
audio_settings: AudioSettings | None = None,
|
||||
device_id: str | None = None,
|
||||
satellite_id: str | None = None,
|
||||
start_stage: PipelineStage = PipelineStage.STT,
|
||||
end_stage: PipelineStage = PipelineStage.TTS,
|
||||
conversation_extra_system_prompt: str | None = None,
|
||||
@@ -115,6 +116,7 @@ async def async_pipeline_from_audio_stream(
|
||||
pipeline_input = PipelineInput(
|
||||
session=session,
|
||||
device_id=device_id,
|
||||
satellite_id=satellite_id,
|
||||
stt_metadata=stt_metadata,
|
||||
stt_stream=stt_stream,
|
||||
wake_word_phrase=wake_word_phrase,
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
"domain": "assist_pipeline",
|
||||
"name": "Assist pipeline",
|
||||
"after_dependencies": ["repairs"],
|
||||
"codeowners": ["@balloob", "@synesthesiam"],
|
||||
"codeowners": ["@synesthesiam", "@arturpragacz"],
|
||||
"dependencies": ["conversation", "stt", "tts", "wake_word"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/assist_pipeline",
|
||||
"integration_type": "system",
|
||||
|
||||
@@ -583,6 +583,9 @@ class PipelineRun:
|
||||
_device_id: str | None = None
|
||||
"""Optional device id set during run start."""
|
||||
|
||||
_satellite_id: str | None = None
|
||||
"""Optional satellite id set during run start."""
|
||||
|
||||
_conversation_data: PipelineConversationData | None = None
|
||||
"""Data tied to the conversation ID."""
|
||||
|
||||
@@ -636,9 +639,12 @@ class PipelineRun:
|
||||
return
|
||||
pipeline_data.pipeline_debug[self.pipeline.id][self.id].events.append(event)
|
||||
|
||||
def start(self, conversation_id: str, device_id: str | None) -> None:
|
||||
def start(
|
||||
self, conversation_id: str, device_id: str | None, satellite_id: str | None
|
||||
) -> None:
|
||||
"""Emit run start event."""
|
||||
self._device_id = device_id
|
||||
self._satellite_id = satellite_id
|
||||
self._start_debug_recording_thread()
|
||||
|
||||
data: dict[str, Any] = {
|
||||
@@ -646,6 +652,8 @@ class PipelineRun:
|
||||
"language": self.language,
|
||||
"conversation_id": conversation_id,
|
||||
}
|
||||
if satellite_id is not None:
|
||||
data["satellite_id"] = satellite_id
|
||||
if self.runner_data is not None:
|
||||
data["runner_data"] = self.runner_data
|
||||
if self.tts_stream:
|
||||
@@ -1057,7 +1065,6 @@ class PipelineRun:
|
||||
self,
|
||||
intent_input: str,
|
||||
conversation_id: str,
|
||||
device_id: str | None,
|
||||
conversation_extra_system_prompt: str | None,
|
||||
) -> str:
|
||||
"""Run intent recognition portion of pipeline. Returns text to speak."""
|
||||
@@ -1088,7 +1095,8 @@ class PipelineRun:
|
||||
"language": input_language,
|
||||
"intent_input": intent_input,
|
||||
"conversation_id": conversation_id,
|
||||
"device_id": device_id,
|
||||
"device_id": self._device_id,
|
||||
"satellite_id": self._satellite_id,
|
||||
"prefer_local_intents": self.pipeline.prefer_local_intents,
|
||||
},
|
||||
)
|
||||
@@ -1099,7 +1107,8 @@ class PipelineRun:
|
||||
text=intent_input,
|
||||
context=self.context,
|
||||
conversation_id=conversation_id,
|
||||
device_id=device_id,
|
||||
device_id=self._device_id,
|
||||
satellite_id=self._satellite_id,
|
||||
language=input_language,
|
||||
agent_id=self.intent_agent.id,
|
||||
extra_system_prompt=conversation_extra_system_prompt,
|
||||
@@ -1269,6 +1278,7 @@ class PipelineRun:
|
||||
text=user_input.text,
|
||||
conversation_id=user_input.conversation_id,
|
||||
device_id=user_input.device_id,
|
||||
satellite_id=user_input.satellite_id,
|
||||
context=user_input.context,
|
||||
language=user_input.language,
|
||||
agent_id=user_input.agent_id,
|
||||
@@ -1567,10 +1577,15 @@ class PipelineInput:
|
||||
device_id: str | None = None
|
||||
"""Identifier of the device that is processing the input/output of the pipeline."""
|
||||
|
||||
satellite_id: str | None = None
|
||||
"""Identifier of the satellite that is processing the input/output of the pipeline."""
|
||||
|
||||
async def execute(self) -> None:
|
||||
"""Run pipeline."""
|
||||
self.run.start(
|
||||
conversation_id=self.session.conversation_id, device_id=self.device_id
|
||||
conversation_id=self.session.conversation_id,
|
||||
device_id=self.device_id,
|
||||
satellite_id=self.satellite_id,
|
||||
)
|
||||
current_stage: PipelineStage | None = self.run.start_stage
|
||||
stt_audio_buffer: list[EnhancedAudioChunk] = []
|
||||
@@ -1656,7 +1671,6 @@ class PipelineInput:
|
||||
tts_input = await self.run.recognize_intent(
|
||||
intent_input,
|
||||
self.session.conversation_id,
|
||||
self.device_id,
|
||||
self.conversation_extra_system_prompt,
|
||||
)
|
||||
if tts_input.strip():
|
||||
|
||||
@@ -522,6 +522,7 @@ class AssistSatelliteEntity(entity.Entity):
|
||||
pipeline_id=self._resolve_pipeline(),
|
||||
conversation_id=session.conversation_id,
|
||||
device_id=device_id,
|
||||
satellite_id=self.entity_id,
|
||||
tts_audio_output=self.tts_options,
|
||||
wake_word_phrase=wake_word_phrase,
|
||||
audio_settings=AudioSettings(
|
||||
|
||||
@@ -75,7 +75,6 @@ class BroadcastIntentHandler(intent.IntentHandler):
|
||||
)
|
||||
|
||||
response = intent_obj.create_response()
|
||||
response.response_type = intent.IntentResponseType.ACTION_DONE
|
||||
response.async_set_results(
|
||||
success_results=[
|
||||
intent.IntentResponseTarget(
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"domain": "assist_satellite",
|
||||
"name": "Assist Satellite",
|
||||
"codeowners": ["@home-assistant/core", "@synesthesiam"],
|
||||
"codeowners": ["@home-assistant/core", "@synesthesiam", "@arturpragacz"],
|
||||
"dependencies": ["assist_pipeline", "http", "stt", "tts"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/assist_satellite",
|
||||
"integration_type": "entity",
|
||||
|
||||
@@ -124,6 +124,8 @@ class AsusWrtBridge(ABC):
|
||||
self._firmware: str | None = None
|
||||
self._label_mac: str | None = None
|
||||
self._model: str | None = None
|
||||
self._model_id: str | None = None
|
||||
self._serial_number: str | None = None
|
||||
|
||||
@property
|
||||
def host(self) -> str:
|
||||
@@ -145,6 +147,16 @@ class AsusWrtBridge(ABC):
|
||||
"""Return model information."""
|
||||
return self._model
|
||||
|
||||
@property
|
||||
def model_id(self) -> str | None:
|
||||
"""Return model_id information."""
|
||||
return self._model_id
|
||||
|
||||
@property
|
||||
def serial_number(self) -> str | None:
|
||||
"""Return serial number information."""
|
||||
return self._serial_number
|
||||
|
||||
@property
|
||||
@abstractmethod
|
||||
def is_connected(self) -> bool:
|
||||
@@ -361,6 +373,8 @@ class AsusWrtHttpBridge(AsusWrtBridge):
|
||||
self._label_mac = format_mac(mac)
|
||||
self._firmware = str(_identity.firmware)
|
||||
self._model = _identity.model
|
||||
self._model_id = _identity.product_id
|
||||
self._serial_number = _identity.serial
|
||||
|
||||
async def async_disconnect(self) -> None:
|
||||
"""Disconnect to the device."""
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
|
||||
"requirements": ["aioasuswrt==1.4.0", "asusrouter==1.20.1"]
|
||||
"requirements": ["aioasuswrt==1.4.0", "asusrouter==1.21.0"]
|
||||
}
|
||||
|
||||
@@ -391,6 +391,8 @@ class AsusWrtRouter:
|
||||
identifiers={(DOMAIN, self._entry.unique_id or "AsusWRT")},
|
||||
name=self.host,
|
||||
model=self._api.model or "Asus Router",
|
||||
model_id=self._api.model_id,
|
||||
serial_number=self._api.serial_number,
|
||||
manufacturer="Asus",
|
||||
configuration_url=f"http://{self.host}",
|
||||
)
|
||||
|
||||
@@ -555,10 +555,6 @@ class BayesianBinarySensor(BinarySensorEntity):
|
||||
for observation in self._observations:
|
||||
if observation.value_template is None:
|
||||
continue
|
||||
if isinstance(observation.value_template, str):
|
||||
observation.value_template = Template(
|
||||
observation.value_template, hass=self.hass
|
||||
)
|
||||
template = observation.value_template
|
||||
observations_by_template.setdefault(template, []).append(observation)
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/bluesound",
|
||||
"iot_class": "local_polling",
|
||||
"requirements": ["pyblu==2.0.4"],
|
||||
"requirements": ["pyblu==2.0.5"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_musc._tcp.local."
|
||||
|
||||
@@ -321,8 +321,14 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
if self.available is False or (self.is_grouped and not self.is_leader):
|
||||
return None
|
||||
|
||||
sources = [x.text for x in self._inputs]
|
||||
sources += [x.name for x in self._presets]
|
||||
sources = [x.name for x in self._presets]
|
||||
|
||||
# ignore if both id and text are None
|
||||
for input_ in self._inputs:
|
||||
if input_.text is not None:
|
||||
sources.append(input_.text)
|
||||
elif input_.id is not None:
|
||||
sources.append(input_.id)
|
||||
|
||||
return sources
|
||||
|
||||
@@ -340,7 +346,7 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
input_.id == self._status.input_id
|
||||
or input_.url == self._status.stream_url
|
||||
):
|
||||
return input_.text
|
||||
return input_.text if input_.text is not None else input_.id
|
||||
|
||||
for preset in self._presets:
|
||||
if preset.url == self._status.stream_url:
|
||||
@@ -537,7 +543,7 @@ class BluesoundPlayer(CoordinatorEntity[BluesoundCoordinator], MediaPlayerEntity
|
||||
|
||||
# presets and inputs might have the same name; presets have priority
|
||||
for input_ in self._inputs:
|
||||
if input_.text == source:
|
||||
if source in (input_.text, input_.id):
|
||||
await self._player.play_url(input_.url)
|
||||
return
|
||||
for preset in self._presets:
|
||||
|
||||
@@ -57,6 +57,7 @@ from .api import (
|
||||
_get_manager,
|
||||
async_address_present,
|
||||
async_ble_device_from_address,
|
||||
async_current_scanners,
|
||||
async_discovered_service_info,
|
||||
async_get_advertisement_callback,
|
||||
async_get_fallback_availability_interval,
|
||||
@@ -114,6 +115,7 @@ __all__ = [
|
||||
"HomeAssistantRemoteScanner",
|
||||
"async_address_present",
|
||||
"async_ble_device_from_address",
|
||||
"async_current_scanners",
|
||||
"async_discovered_service_info",
|
||||
"async_get_advertisement_callback",
|
||||
"async_get_fallback_availability_interval",
|
||||
|
||||
@@ -66,6 +66,22 @@ def async_scanner_count(hass: HomeAssistant, connectable: bool = True) -> int:
|
||||
return _get_manager(hass).async_scanner_count(connectable)
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_current_scanners(hass: HomeAssistant) -> list[BaseHaScanner]:
|
||||
"""Return the list of currently active scanners.
|
||||
|
||||
This method returns a list of all active Bluetooth scanners registered
|
||||
with Home Assistant, including both connectable and non-connectable scanners.
|
||||
|
||||
Args:
|
||||
hass: Home Assistant instance
|
||||
|
||||
Returns:
|
||||
List of all active scanner instances
|
||||
"""
|
||||
return _get_manager(hass).async_current_scanners()
|
||||
|
||||
|
||||
@hass_callback
|
||||
def async_discovered_service_info(
|
||||
hass: HomeAssistant, connectable: bool = True
|
||||
|
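The new async_current_scanners helper exposes the manager's full scanner list, covering both connectable and non-connectable scanners. A short usage sketch; the function name and debug logging are illustrative, while source and connectable are existing BaseHaScanner attributes:

    import logging

    from homeassistant.components import bluetooth
    from homeassistant.core import HomeAssistant, callback

    _LOGGER = logging.getLogger(__name__)


    @callback
    def log_active_scanners(hass: HomeAssistant) -> None:
        """Log every active Bluetooth scanner, local or remote."""
        for scanner in bluetooth.async_current_scanners(hass):
            _LOGGER.debug(
                "Scanner %s (connectable=%s)", scanner.source, scanner.connectable
            )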
||||
@@ -8,8 +8,19 @@ import itertools
|
||||
import logging
|
||||
|
||||
from bleak_retry_connector import BleakSlotManager
|
||||
from bluetooth_adapters import BluetoothAdapters
|
||||
from habluetooth import BaseHaRemoteScanner, BaseHaScanner, BluetoothManager
|
||||
from bluetooth_adapters import (
|
||||
ADAPTER_TYPE,
|
||||
BluetoothAdapters,
|
||||
adapter_human_name,
|
||||
adapter_model,
|
||||
)
|
||||
from habluetooth import (
|
||||
BaseHaRemoteScanner,
|
||||
BaseHaScanner,
|
||||
BluetoothManager,
|
||||
BluetoothScanningMode,
|
||||
HaScanner,
|
||||
)
|
||||
|
||||
from homeassistant import config_entries
|
||||
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, EVENT_LOGGING_CHANGED
|
||||
@@ -19,8 +30,9 @@ from homeassistant.core import (
|
||||
HomeAssistant,
|
||||
callback as hass_callback,
|
||||
)
|
||||
from homeassistant.helpers import discovery_flow
|
||||
from homeassistant.helpers import discovery_flow, issue_registry as ir
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.util.package import is_docker_env
|
||||
|
||||
from .const import (
|
||||
CONF_SOURCE,
|
||||
@@ -314,3 +326,97 @@ class HomeAssistantBluetoothManager(BluetoothManager):
|
||||
address = discovery_key.key
|
||||
_LOGGER.debug("Rediscover address %s", address)
|
||||
self.async_rediscover_address(address)
|
||||
|
||||
def on_scanner_start(self, scanner: BaseHaScanner) -> None:
|
||||
"""Handle when a scanner starts.
|
||||
|
||||
Create or delete repair issues for local adapters based on degraded mode.
|
||||
"""
|
||||
super().on_scanner_start(scanner)
|
||||
|
||||
# Only handle repair issues for local adapters (HaScanner instances)
|
||||
if not isinstance(scanner, HaScanner):
|
||||
return
|
||||
self.async_check_degraded_mode(scanner)
|
||||
self.async_check_scanning_mode(scanner)
|
||||
|
||||
@hass_callback
|
||||
def async_check_scanning_mode(self, scanner: HaScanner) -> None:
|
||||
"""Check if the scanner is running in passive mode when active mode is requested."""
|
||||
passive_mode_issue_id = f"bluetooth_adapter_passive_mode_{scanner.source}"
|
||||
|
||||
# Check if scanner is NOT in passive mode when active mode was requested
|
||||
if not (
|
||||
scanner.requested_mode is BluetoothScanningMode.ACTIVE
|
||||
and scanner.current_mode is BluetoothScanningMode.PASSIVE
|
||||
):
|
||||
# Delete passive mode issue if it exists and we're not in passive fallback
|
||||
ir.async_delete_issue(self.hass, DOMAIN, passive_mode_issue_id)
|
||||
return
|
||||
|
||||
# Create repair issue for passive mode fallback
|
||||
adapter_name = adapter_human_name(
|
||||
scanner.adapter, scanner.mac_address or "00:00:00:00:00:00"
|
||||
)
|
||||
adapter_details = self._bluetooth_adapters.adapters.get(scanner.adapter)
|
||||
model = adapter_model(adapter_details) if adapter_details else None
|
||||
|
||||
# Determine adapter type for specific instructions
|
||||
# Default to USB for any other type or unknown
|
||||
if adapter_details and adapter_details.get(ADAPTER_TYPE) == "uart":
|
||||
translation_key = "bluetooth_adapter_passive_mode_uart"
|
||||
else:
|
||||
translation_key = "bluetooth_adapter_passive_mode_usb"
|
||||
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
passive_mode_issue_id,
|
||||
is_fixable=False, # Requires a reboot or unplug
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key=translation_key,
|
||||
translation_placeholders={
|
||||
"adapter": adapter_name,
|
||||
"model": model or "Unknown",
|
||||
},
|
||||
)
|
||||
|
||||
@hass_callback
|
||||
def async_check_degraded_mode(self, scanner: HaScanner) -> None:
|
||||
"""Check if we are in degraded mode and create/delete repair issues."""
|
||||
issue_id = f"bluetooth_adapter_missing_permissions_{scanner.source}"
|
||||
|
||||
# Delete any existing issue if not in degraded mode
|
||||
if not self.is_operating_degraded():
|
||||
ir.async_delete_issue(self.hass, DOMAIN, issue_id)
|
||||
return
|
||||
|
||||
# Only create repair issues for Docker-based installations where users
|
||||
# can fix permissions. This includes: Home Assistant Supervised,
|
||||
# Home Assistant Container, and third-party containers
|
||||
if not is_docker_env():
|
||||
return
|
||||
|
||||
# Create repair issue for degraded mode in Docker (including Supervised)
|
||||
adapter_name = adapter_human_name(
|
||||
scanner.adapter, scanner.mac_address or "00:00:00:00:00:00"
|
||||
)
|
||||
|
||||
# Try to get adapter details from the bluetooth adapters
|
||||
adapter_details = self._bluetooth_adapters.adapters.get(scanner.adapter)
|
||||
model = adapter_model(adapter_details) if adapter_details else None
|
||||
|
||||
ir.async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
issue_id,
|
||||
is_fixable=False, # Not fixable from within HA - requires
|
||||
# container restart with new permissions
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="bluetooth_adapter_missing_permissions",
|
||||
translation_placeholders={
|
||||
"adapter": adapter_name,
|
||||
"model": model or "Unknown",
|
||||
"docs_url": "https://www.home-assistant.io/integrations/bluetooth/#additional-details-for-container",
|
||||
},
|
||||
)
|
||||
|
||||
@@ -21,6 +21,6 @@
|
||||
"bluetooth-auto-recovery==1.5.2",
|
||||
"bluetooth-data-tools==1.28.2",
|
||||
"dbus-fast==2.44.3",
|
||||
"habluetooth==5.3.0"
|
||||
"habluetooth==5.6.2"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -38,5 +38,19 @@
|
||||
"remote_adapters_not_supported": "Bluetooth configuration for remote adapters is not supported.",
|
||||
"local_adapters_no_passive_support": "Local Bluetooth adapters that do not support passive scanning cannot be configured."
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"bluetooth_adapter_missing_permissions": {
|
||||
"title": "Bluetooth adapter requires additional permissions",
|
||||
"description": "The Bluetooth adapter **{adapter}** ({model}) is operating in degraded mode because your container needs additional permissions to fully access Bluetooth hardware.\n\nPlease follow the instructions in our documentation to add the required permissions:\n[Bluetooth permissions for Docker]({docs_url})\n\nAfter adding these permissions, restart your Home Assistant container for the changes to take effect."
|
||||
},
|
||||
"bluetooth_adapter_passive_mode_usb": {
|
||||
"title": "Bluetooth USB adapter requires manual power cycle",
|
||||
"description": "The Bluetooth adapter **{adapter}** ({model}) is stuck in passive scanning mode despite requesting active scanning mode. **Automatic recovery was attempted but failed.** This is likely a kernel, firmware, or operating system issue, and the adapter requires a manual power cycle to recover.\n\nIn passive mode, the adapter can only receive advertisements but cannot request additional data from devices, which will affect device discovery and functionality.\n\n**Manual intervention required:**\n1. **Unplug the USB adapter**\n2. Wait 5 seconds\n3. **Plug it back in**\n4. Wait for Home Assistant to detect the adapter\n\nIf the issue persists after power cycling:\n- Try a different USB port\n- Check for kernel/firmware updates\n- Consider using a different Bluetooth adapter"
|
||||
},
|
||||
"bluetooth_adapter_passive_mode_uart": {
|
||||
"title": "Bluetooth adapter requires system power cycle",
|
||||
"description": "The Bluetooth adapter **{adapter}** ({model}) is stuck in passive scanning mode despite requesting active scanning mode. **Automatic recovery was attempted but failed.** This is likely a kernel, firmware, or operating system issue, and the system requires a complete power cycle to recover the adapter.\n\nIn passive mode, the adapter can only receive advertisements but cannot request additional data from devices, which will affect device discovery and functionality.\n\n**Manual intervention required:**\n1. **Shut down the system completely** (not just a reboot)\n2. **Remove power** (unplug or turn off at the switch)\n3. Wait 10 seconds\n4. Restore power and boot the system\n\nIf the issue persists after power cycling:\n- Check for kernel/firmware updates\n- The onboard Bluetooth adapter may have hardware issues"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,8 +8,10 @@ import time
from typing import Any

from habluetooth import (
    BaseHaScanner,
    BluetoothScanningMode,
    HaBluetoothSlotAllocations,
    HaScannerModeChange,
    HaScannerRegistration,
    HaScannerRegistrationEvent,
)
@@ -27,12 +29,54 @@ from .models import BluetoothChange
from .util import InvalidConfigEntryID, InvalidSource, config_entry_id_to_source


@callback
def _async_get_source_from_config_entry(
    hass: HomeAssistant,
    connection: websocket_api.ActiveConnection,
    msg_id: int,
    config_entry_id: str | None,
    validate_source: bool = True,
) -> str | None:
    """Get source from config entry id.

    Returns None if no config_entry_id provided or on error (after sending error response).
    If validate_source is True, also validates that the scanner exists.
    """
    if not config_entry_id:
        return None

    if validate_source:
        # Use the full validation that checks if scanner exists
        try:
            return config_entry_id_to_source(hass, config_entry_id)
        except InvalidConfigEntryID as err:
            connection.send_error(msg_id, "invalid_config_entry_id", str(err))
            return None
        except InvalidSource as err:
            connection.send_error(msg_id, "invalid_source", str(err))
            return None

    # Just check if config entry exists and belongs to bluetooth
    if (
        not (entry := hass.config_entries.async_get_entry(config_entry_id))
        or entry.domain != DOMAIN
    ):
        connection.send_error(
            msg_id,
            "invalid_config_entry_id",
            f"Config entry {config_entry_id} not found",
        )
        return None
    return entry.unique_id


@callback
def async_setup(hass: HomeAssistant) -> None:
    """Set up the bluetooth websocket API."""
    websocket_api.async_register_command(hass, ws_subscribe_advertisements)
    websocket_api.async_register_command(hass, ws_subscribe_connection_allocations)
    websocket_api.async_register_command(hass, ws_subscribe_scanner_details)
    websocket_api.async_register_command(hass, ws_subscribe_scanner_state)


@lru_cache(maxsize=1024)
@@ -180,16 +224,12 @@ async def ws_subscribe_connection_allocations(
) -> None:
    """Handle subscribe advertisements websocket command."""
    ws_msg_id = msg["id"]
    source: str | None = None
    if config_entry_id := msg.get("config_entry_id"):
        try:
            source = config_entry_id_to_source(hass, config_entry_id)
        except InvalidConfigEntryID as err:
            connection.send_error(ws_msg_id, "invalid_config_entry_id", str(err))
            return
        except InvalidSource as err:
            connection.send_error(ws_msg_id, "invalid_source", str(err))
            return
    config_entry_id = msg.get("config_entry_id")
    source = _async_get_source_from_config_entry(
        hass, connection, ws_msg_id, config_entry_id
    )
    if config_entry_id and source is None:
        return  # Error already sent by helper

    def _async_allocations_changed(allocations: HaBluetoothSlotAllocations) -> None:
        connection.send_message(
@@ -220,20 +260,12 @@ async def ws_subscribe_scanner_details(
) -> None:
    """Handle subscribe scanner details websocket command."""
    ws_msg_id = msg["id"]
    source: str | None = None
    if config_entry_id := msg.get("config_entry_id"):
        if (
            not (entry := hass.config_entries.async_get_entry(config_entry_id))
            or entry.domain != DOMAIN
        ):
            connection.send_error(
                ws_msg_id,
                "invalid_config_entry_id",
                f"Invalid config entry id: {config_entry_id}",
            )
            return
        source = entry.unique_id
        assert source is not None
    config_entry_id = msg.get("config_entry_id")
    source = _async_get_source_from_config_entry(
        hass, connection, ws_msg_id, config_entry_id, validate_source=False
    )
    if config_entry_id and source is None:
        return  # Error already sent by helper

    def _async_event_message(message: dict[str, Any]) -> None:
        connection.send_message(
@@ -260,3 +292,70 @@ async def ws_subscribe_scanner_details(
        ]
    ):
        _async_event_message({"add": matching_scanners})


@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required("type"): "bluetooth/subscribe_scanner_state",
        vol.Optional("config_entry_id"): str,
    }
)
@websocket_api.async_response
async def ws_subscribe_scanner_state(
    hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
    """Handle subscribe scanner state websocket command."""
    ws_msg_id = msg["id"]
    config_entry_id = msg.get("config_entry_id")
    source = _async_get_source_from_config_entry(
        hass, connection, ws_msg_id, config_entry_id, validate_source=False
    )
    if config_entry_id and source is None:
        return  # Error already sent by helper

    @callback
    def _async_send_scanner_state(
        scanner: BaseHaScanner,
        current_mode: BluetoothScanningMode | None,
        requested_mode: BluetoothScanningMode | None,
    ) -> None:
        payload = {
            "source": scanner.source,
            "adapter": scanner.adapter,
            "current_mode": current_mode.value if current_mode else None,
            "requested_mode": requested_mode.value if requested_mode else None,
        }
        connection.send_message(
            json_bytes(
                websocket_api.event_message(
                    ws_msg_id,
                    payload,
                )
            )
        )

    @callback
    def _async_scanner_state_changed(mode_change: HaScannerModeChange) -> None:
        _async_send_scanner_state(
            mode_change.scanner,
            mode_change.current_mode,
            mode_change.requested_mode,
        )

    manager = _get_manager(hass)
    connection.subscriptions[ws_msg_id] = (
        manager.async_register_scanner_mode_change_callback(
            _async_scanner_state_changed, source
        )
    )
    connection.send_message(json_bytes(websocket_api.result_message(ws_msg_id)))

    # Send initial state for all matching scanners
    for scanner in manager.async_current_scanners():
        if source is None or scanner.source == source:
            _async_send_scanner_state(
                scanner,
                scanner.current_mode,
                scanner.requested_mode,
            )

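Note: the new bluetooth/subscribe_scanner_state command follows the same subscribe-then-stream pattern as the other bluetooth websocket commands: the result message is sent first, then an initial state event per matching scanner, then events on every mode change. A minimal sketch of how a test might exercise it, assuming the standard hass, hass_ws_client pytest fixtures and an already configured Bluetooth adapter (the test name and ids are illustrative, not part of the diff):

    # Sketch only: subscribes to scanner state and reads the first event.
    async def test_subscribe_scanner_state(hass, hass_ws_client) -> None:
        client = await hass_ws_client(hass)
        await client.send_json(
            {
                "id": 1,
                "type": "bluetooth/subscribe_scanner_state",
                # "config_entry_id" can be omitted to subscribe to all scanners
            }
        )
        result = await client.receive_json()
        assert result["success"]

        # Initial state events carry source, adapter, current_mode, requested_mode
        event = await client.receive_json()
        assert "current_mode" in event["event"]
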
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/bmw_connected_drive",
  "iot_class": "cloud_polling",
  "loggers": ["bimmer_connected"],
  "requirements": ["bimmer-connected[china]==0.17.2"]
  "requirements": ["bimmer-connected[china]==0.17.3"]
}

@@ -164,10 +164,6 @@
      "name": "[%key:component::notify::services::notify::name%]",
      "description": "Sends a mobile push notification to members of a shared Bring! list.",
      "fields": {
        "entity_id": {
          "name": "List",
          "description": "Bring! list whose members (except sender) will be notified."
        },
        "message": {
          "name": "Notification type",
          "description": "Type of push notification to send to list members."

@@ -8,7 +8,7 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"],
  "requirements": ["brother==5.0.1"],
  "requirements": ["brother==5.1.0"],
  "zeroconf": [
    {
      "type": "_printer._tcp.local.",

@@ -85,10 +85,10 @@
    "entity": {
      "sensor": {
        "current_temperature": {
          "name": "Current Temperature"
          "name": "Current temperature"
        },
        "outside_temperature": {
          "name": "Outside Temperature"
          "name": "Outside temperature"
        }
      }
    }

@@ -81,7 +81,11 @@ from .const import (
)
from .helper import get_camera_from_entity_id
from .img_util import scale_jpeg_camera_image
from .prefs import CameraPreferences, DynamicStreamSettings  # noqa: F401
from .prefs import (
    CameraPreferences,
    DynamicStreamSettings,  # noqa: F401
    get_dynamic_camera_stream_settings,
)
from .webrtc import (
    DATA_ICE_SERVERS,
    CameraWebRTCProvider,
@@ -550,9 +554,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
            self.hass,
            source,
            options=self.stream_options,
            dynamic_stream_settings=await self.hass.data[
                DATA_CAMERA_PREFS
            ].get_dynamic_stream_settings(self.entity_id),
            dynamic_stream_settings=await get_dynamic_camera_stream_settings(
                self.hass, self.entity_id
            ),
            stream_label=self.entity_id,
        )
        self.stream.set_update_callback(self.async_write_ha_state)
@@ -942,9 +946,7 @@ async def websocket_get_prefs(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Handle request for account info."""
    stream_prefs = await hass.data[DATA_CAMERA_PREFS].get_dynamic_stream_settings(
        msg["entity_id"]
    )
    stream_prefs = await get_dynamic_camera_stream_settings(hass, msg["entity_id"])
    connection.send_result(msg["id"], asdict(stream_prefs))

@@ -13,7 +13,7 @@ from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.storage import Store
from homeassistant.helpers.typing import UNDEFINED, UndefinedType

from .const import DOMAIN, PREF_ORIENTATION, PREF_PRELOAD_STREAM
from .const import DATA_CAMERA_PREFS, DOMAIN, PREF_ORIENTATION, PREF_PRELOAD_STREAM

STORAGE_KEY: Final = DOMAIN
STORAGE_VERSION: Final = 1
@@ -106,3 +106,12 @@ class CameraPreferences:
        )
        self._dynamic_stream_settings_by_entity_id[entity_id] = settings
        return settings


async def get_dynamic_camera_stream_settings(
    hass: HomeAssistant, entity_id: str
) -> DynamicStreamSettings:
    """Get dynamic stream settings for a camera entity."""
    if DATA_CAMERA_PREFS not in hass.data:
        raise HomeAssistantError("Camera integration not set up")
    return await hass.data[DATA_CAMERA_PREFS].get_dynamic_stream_settings(entity_id)

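The new module-level helper gives other code a single entry point for a camera's dynamic stream preferences instead of reaching into hass.data[DATA_CAMERA_PREFS] directly. A rough usage sketch under those assumptions; the calling function and the printed fields are illustrative, not part of the diff:

    from homeassistant.components.camera.prefs import get_dynamic_camera_stream_settings
    from homeassistant.exceptions import HomeAssistantError


    async def log_camera_stream_prefs(hass, entity_id: str) -> None:
        """Hypothetical caller: read a camera's dynamic stream settings."""
        try:
            settings = await get_dynamic_camera_stream_settings(hass, entity_id)
        except HomeAssistantError:
            # Raised when the camera integration has not been set up yet
            return
        # DynamicStreamSettings is a small dataclass; field names assumed here
        print(settings.preload_stream, settings.orientation)
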
@@ -3,7 +3,8 @@
import logging
import threading

import pychromecast
import pychromecast.discovery
import pychromecast.models

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP

@@ -11,10 +11,13 @@ from uuid import UUID

import aiohttp
import attr
import pychromecast
from pychromecast import dial
from pychromecast.const import CAST_TYPE_GROUP
import pychromecast.controllers.media
import pychromecast.controllers.multizone
import pychromecast.controllers.receiver
from pychromecast.models import CastInfo
import pychromecast.socket_client

from homeassistant.core import HomeAssistant
from homeassistant.helpers import aiohttp_client

@@ -10,8 +10,10 @@ import json
import logging
from typing import TYPE_CHECKING, Any, Concatenate

import pychromecast
import pychromecast.config
import pychromecast.const
from pychromecast.controllers.homeassistant import HomeAssistantController
import pychromecast.controllers.media
from pychromecast.controllers.media import (
    MEDIA_PLAYER_ERROR_CODES,
    MEDIA_PLAYER_STATE_BUFFERING,

@@ -89,7 +89,6 @@ class SetTemperatureIntent(intent.IntentHandler):
        )

        response = intent_obj.create_response()
        response.response_type = intent.IntentResponseType.ACTION_DONE
        response.async_set_results(
            success_results=[
                intent.IntentResponseTarget(

@@ -274,16 +274,16 @@
      "message": "Provided temperature {check_temp} is not valid. Accepted range is {min_temp} to {max_temp}."
    },
    "low_temp_higher_than_high_temp": {
      "message": "Target temperature low can not be higher than Target temperature high."
      "message": "'Lower target temperature' can not be higher than 'Upper target temperature'."
    },
    "humidity_out_of_range": {
      "message": "Provided humidity {humidity} is not valid. Accepted range is {min_humidity} to {max_humidity}."
    },
    "missing_target_temperature_entity_feature": {
      "message": "Set temperature action was used with the target temperature parameter but the entity does not support it."
      "message": "Set temperature action was used with the 'Target temperature' parameter but the entity does not support it."
    },
    "missing_target_temperature_range_entity_feature": {
      "message": "Set temperature action was used with the target temperature low/high parameter but the entity does not support it."
      "message": "Set temperature action was used with the 'Lower/Upper target temperature' parameter but the entity does not support it."
    }
  }
}

@@ -71,6 +71,7 @@ async def async_converse(
    language: str | None = None,
    agent_id: str | None = None,
    device_id: str | None = None,
    satellite_id: str | None = None,
    extra_system_prompt: str | None = None,
) -> ConversationResult:
    """Process text and get intent."""
@@ -97,6 +98,7 @@
            context=context,
            conversation_id=conversation_id,
            device_id=device_id,
            satellite_id=satellite_id,
            language=language,
            agent_id=agent_id,
            extra_system_prompt=extra_system_prompt,

@@ -507,14 +507,18 @@ class ChatLog:
    async def async_provide_llm_data(
        self,
        llm_context: llm.LLMContext,
        user_llm_hass_api: str | list[str] | None = None,
        user_llm_hass_api: str | list[str] | llm.API | None = None,
        user_llm_prompt: str | None = None,
        user_extra_system_prompt: str | None = None,
    ) -> None:
        """Set the LLM system prompt."""
        llm_api: llm.APIInstance | None = None

        if user_llm_hass_api:
        if user_llm_hass_api is None:
            pass
        elif isinstance(user_llm_hass_api, llm.API):
            llm_api = await user_llm_hass_api.async_get_api_instance(llm_context)
        else:
            try:
                llm_api = await llm.async_get_api(
                    self.hass,

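With this change, callers can hand async_provide_llm_data a pre-built llm.API object in addition to an API id string or list of ids. A simplified sketch of the three accepted forms, assuming an existing chat_log and llm_context; my_custom_api is an illustrative llm.API instance, and "assist" is used here as an assumed built-in API id:

    # String id: resolved via llm.async_get_api() as before
    await chat_log.async_provide_llm_data(llm_context, user_llm_hass_api="assist")

    # Pre-built API object: its async_get_api_instance() is awaited directly
    await chat_log.async_provide_llm_data(llm_context, user_llm_hass_api=my_custom_api)

    # None: no LLM API is attached to the conversation
    await chat_log.async_provide_llm_data(llm_context, user_llm_hass_api=None)
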
@@ -35,7 +35,7 @@ from hassil.recognize import (
)
from hassil.string_matcher import UnmatchedRangeEntity, UnmatchedTextEntity
from hassil.trie import Trie
from hassil.util import merge_dict
from hassil.util import merge_dict, remove_punctuation
from home_assistant_intents import (
    ErrorKey,
    FuzzyConfig,
@@ -327,12 +327,10 @@ class DefaultAgent(ConversationEntity):

        if self._exposed_names_trie is not None:
            # Filter by input string
            text_lower = user_input.text.strip().lower()
            text = remove_punctuation(user_input.text).strip().lower()
            slot_lists["name"] = TextSlotList(
                name="name",
                values=[
                    result[2] for result in self._exposed_names_trie.find(text_lower)
                ],
                values=[result[2] for result in self._exposed_names_trie.find(text)],
            )

        start = time.monotonic()
@@ -373,7 +371,6 @@ class DefaultAgent(ConversationEntity):
            response = intent.IntentResponse(
                language=user_input.language or self.hass.config.language
            )
            response.response_type = intent.IntentResponseType.ACTION_DONE
            response.async_set_speech(response_text)

        if response is None:
@@ -473,6 +470,7 @@ class DefaultAgent(ConversationEntity):
                language,
                assistant=DOMAIN,
                device_id=user_input.device_id,
                satellite_id=user_input.satellite_id,
                conversation_agent_id=user_input.agent_id,
            )
        except intent.MatchFailedError as match_error:
@@ -1263,7 +1261,7 @@ class DefaultAgent(ConversationEntity):
        name_list = TextSlotList.from_tuples(exposed_entity_names, allow_template=False)
        for name_value in name_list.values:
            assert isinstance(name_value.text_in, TextChunk)
            name_text = name_value.text_in.text.strip().lower()
            name_text = remove_punctuation(name_value.text_in.text).strip().lower()
            self._exposed_names_trie.insert(name_text, name_value)

        self._slot_lists = {

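The point of the default-agent change is that both sides of the trie lookup are now normalized the same way: exposed names are inserted with punctuation stripped, and the incoming text is stripped the same way before find(). A minimal sketch of that symmetry, assuming hassil's Trie and remove_punctuation behave as they are used in the diff (the entity names and sentence are made up):

    from hassil.trie import Trie
    from hassil.util import remove_punctuation

    def normalize(text: str) -> str:
        """Apply the same normalization at insert and lookup time."""
        return remove_punctuation(text).strip().lower()

    trie = Trie()
    # Without normalization, a name like "Mr. Bed Room Light" would never match
    # the punctuation-free text produced by speech-to-text.
    trie.insert(normalize("Mr. Bed Room Light"), "light.bedroom")

    # Lookup text is normalized identically; result[2] is the stored value,
    # matching how the diff consumes find() results.
    matches = [result[2] for result in trie.find(normalize("turn on mr bed room light!"))]
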
@@ -201,6 +201,7 @@ async def websocket_hass_agent_debug(
            context=connection.context(msg),
            conversation_id=None,
            device_id=msg.get("device_id"),
            satellite_id=None,
            language=msg.get("language", hass.config.language),
            agent_id=agent.entity_id,
        )

@@ -1,7 +1,7 @@
{
  "domain": "conversation",
  "name": "Conversation",
  "codeowners": ["@home-assistant/core", "@synesthesiam"],
  "codeowners": ["@home-assistant/core", "@synesthesiam", "@arturpragacz"],
  "dependencies": ["http", "intent"],
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "system",

@@ -37,6 +37,9 @@ class ConversationInput:
    device_id: str | None
    """Unique identifier for the device."""

    satellite_id: str | None
    """Unique identifier for the satellite."""

    language: str
    """Language of the request."""

@@ -53,6 +56,7 @@ class ConversationInput:
            "context": self.context.as_dict(),
            "conversation_id": self.conversation_id,
            "device_id": self.device_id,
            "satellite_id": self.satellite_id,
            "language": self.language,
            "agent_id": self.agent_id,
            "extra_system_prompt": self.extra_system_prompt,

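Because satellite_id is also serialized by as_dict(), anything consuming the serialized input (sentence triggers, the debug websocket) sees it alongside device_id. A tiny, purely illustrative sketch of constructing an input with the new field; the entity id is made up and the set of required constructor fields is assumed from the dataclass shown above:

    from homeassistant.components.conversation import ConversationInput
    from homeassistant.core import Context

    # Illustrative only; field names follow the as_dict() output in the diff.
    user_input = ConversationInput(
        text="turn on the kitchen light",
        context=Context(),
        conversation_id=None,
        device_id=None,
        satellite_id="assist_satellite.kitchen",
        language="en",
        agent_id="conversation.home_assistant",
        extra_system_prompt=None,
    )
    assert user_input.as_dict()["satellite_id"] == "assist_satellite.kitchen"
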
@@ -100,6 +100,7 @@ async def async_attach_trigger(
                entity_name: entity["value"] for entity_name, entity in details.items()
            },
            "device_id": user_input.device_id,
            "satellite_id": user_input.satellite_id,
            "user_input": user_input.as_dict(),
        }

@@ -6,5 +6,5 @@
  "integration_type": "service",
  "iot_class": "local_push",
  "quality_scale": "internal",
  "requirements": ["debugpy==1.8.14"]
  "requirements": ["debugpy==1.8.16"]
}

@@ -99,7 +99,7 @@ T = TypeVar(


@dataclass(frozen=True, kw_only=True)
class DeconzSensorDescription(Generic[T], SensorEntityDescription):
class DeconzSensorDescription(SensorEntityDescription, Generic[T]):
    """Class describing deCONZ binary sensor entities."""

    instance_check: type[T] | None = None

@@ -19,8 +19,10 @@ from homeassistant.config_entries import (
)
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant, callback
from homeassistant.data_entry_flow import AbortFlow
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import format_mac
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from homeassistant.helpers.typing import VolDictType

@@ -103,6 +105,43 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN):
        """Initialize the DoorBird config flow."""
        self.discovery_schema: vol.Schema | None = None

    async def _async_verify_existing_device_for_discovery(
        self,
        existing_entry: ConfigEntry,
        host: str,
        macaddress: str,
    ) -> None:
        """Verify discovered device matches existing entry before updating IP.

        This method performs the following verification steps:
        1. Ensures that the stored credentials work before updating the entry.
        2. Verifies that the device at the discovered IP address has the expected MAC address.
        """
        info, errors = await self._async_validate_or_error(
            {
                **existing_entry.data,
                CONF_HOST: host,
            }
        )

        if errors:
            _LOGGER.debug(
                "Cannot validate DoorBird at %s with existing credentials: %s",
                host,
                errors,
            )
            raise AbortFlow("cannot_connect")

        # Verify the MAC address matches what was advertised
        if format_mac(info["mac_addr"]) != format_mac(macaddress):
            _LOGGER.debug(
                "DoorBird at %s reports MAC %s but zeroconf advertised %s, ignoring",
                host,
                info["mac_addr"],
                macaddress,
            )
            raise AbortFlow("wrong_device")

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
@@ -172,7 +211,22 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN):

        await self.async_set_unique_id(macaddress)
        host = discovery_info.host
        self._abort_if_unique_id_configured(updates={CONF_HOST: host})

        # Check if we have an existing entry for this MAC
        existing_entry = self.hass.config_entries.async_entry_for_domain_unique_id(
            DOMAIN, macaddress
        )

        if existing_entry:
            # Check if the host is actually changing
            if existing_entry.data.get(CONF_HOST) != host:
                await self._async_verify_existing_device_for_discovery(
                    existing_entry, host, macaddress
                )

            # All checks passed or no change needed, abort
            # if already configured with potential IP update
            self._abort_if_unique_id_configured(updates={CONF_HOST: host})

        self._async_abort_entries_match({CONF_HOST: host})

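For reference, the verification added to the discovery path boils down to two checks before an existing entry's host is updated: the stored credentials must still work against the newly advertised address, and the MAC the device reports must match the MAC zeroconf advertised. A stripped-down sketch of the MAC comparison only, assuming an info dict shaped like the one _async_validate_or_error returns in the diff; the helper name is hypothetical:

    from homeassistant.helpers.device_registry import format_mac


    def mac_matches_advertisement(info: dict, advertised_mac: str) -> bool:
        """Return True if the device's reported MAC matches the zeroconf MAC."""
        # format_mac normalizes separators and case so the comparison is stable
        return format_mac(info["mac_addr"]) == format_mac(advertised_mac)
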
@@ -49,6 +49,8 @@
      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
      "link_local_address": "Link local addresses are not supported",
      "not_doorbird_device": "This device is not a DoorBird",
      "not_ipv4_address": "Only IPv4 addresses are supported",
      "wrong_device": "Device MAC address does not match",
      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
    },
    "flow_title": "{name} ({host})",

Some files were not shown because too many files have changed in this diff.