Mirror of https://github.com/home-assistant/core.git (synced 2025-09-21 19:09:35 +00:00)
Compare commits: 2024.11.0b ... cloud_enab (403 commits)
@@ -79,6 +79,7 @@ components: &components
|
||||
- homeassistant/components/group/**
|
||||
- homeassistant/components/hassio/**
|
||||
- homeassistant/components/homeassistant/**
|
||||
- homeassistant/components/homeassistant_hardware/**
|
||||
- homeassistant/components/http/**
|
||||
- homeassistant/components/image/**
|
||||
- homeassistant/components/input_boolean/**
|
||||
|
2
.github/workflows/builder.yml
vendored
@@ -531,7 +531,7 @@ jobs:
|
||||
|
||||
- name: Generate artifact attestation
|
||||
if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
|
||||
uses: actions/attest-build-provenance@1c608d11d69870c2092266b3f9a6f3abbf17002c # v1.4.3
|
||||
uses: actions/attest-build-provenance@ef244123eb79f2f7a7e75d99086184180e6d0018 # v1.4.4
|
||||
with:
|
||||
subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
|
||||
subject-digest: ${{ steps.push.outputs.digest }}
|
||||
|
28
.github/workflows/ci.yaml
vendored
@@ -40,9 +40,9 @@ env:
|
||||
CACHE_VERSION: 11
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 9
|
||||
HA_SHORT_VERSION: "2024.11"
|
||||
HA_SHORT_VERSION: "2024.12"
|
||||
DEFAULT_PYTHON: "3.12"
|
||||
ALL_PYTHON_VERSIONS: "['3.12']"
|
||||
ALL_PYTHON_VERSIONS: "['3.12', '3.13']"
|
||||
# 10.3 is the oldest supported version
|
||||
# - 10.3.32 is the version currently shipped with Synology (as of 17 Feb 2022)
|
||||
# 10.6 is the current long-term-support
|
||||
@@ -622,13 +622,13 @@ jobs:
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
python-version: ${{ matrix.python-version }}
|
||||
check-latest: true
|
||||
- name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
|
||||
- name: Restore full Python ${{ matrix.python-version }} virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache/restore@v4.1.2
|
||||
with:
|
||||
@@ -819,11 +819,7 @@ jobs:
|
||||
needs:
|
||||
- info
|
||||
- base
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ${{ fromJson(needs.info.outputs.python_versions) }}
|
||||
name: Split tests for full run Python ${{ matrix.python-version }}
|
||||
name: Split tests for full run
|
||||
steps:
|
||||
- name: Install additional OS dependencies
|
||||
run: |
|
||||
@@ -836,11 +832,11 @@ jobs:
|
||||
libgammu-dev
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v4.2.2
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v5.3.0
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
- name: Restore base Python virtual environment
|
||||
id: cache-venv
|
||||
@@ -858,7 +854,7 @@ jobs:
|
||||
- name: Upload pytest_buckets
|
||||
uses: actions/upload-artifact@v4.4.3
|
||||
with:
|
||||
name: pytest_buckets-${{ matrix.python-version }}
|
||||
name: pytest_buckets
|
||||
path: pytest_buckets.txt
|
||||
overwrite: true
|
||||
|
||||
@@ -923,7 +919,7 @@ jobs:
|
||||
- name: Download pytest_buckets
|
||||
uses: actions/download-artifact@v4.1.8
|
||||
with:
|
||||
name: pytest_buckets-${{ matrix.python-version }}
|
||||
name: pytest_buckets
|
||||
- name: Compile English translations
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
@@ -949,6 +945,7 @@ jobs:
|
||||
--timeout=9 \
|
||||
--durations=10 \
|
||||
--numprocesses auto \
|
||||
--snapshot-details \
|
||||
--dist=loadfile \
|
||||
${cov_params[@]} \
|
||||
-o console_output_style=count \
|
||||
@@ -1071,6 +1068,7 @@ jobs:
|
||||
-qq \
|
||||
--timeout=20 \
|
||||
--numprocesses 1 \
|
||||
--snapshot-details \
|
||||
${cov_params[@]} \
|
||||
-o console_output_style=count \
|
||||
--durations=10 \
|
||||
@@ -1199,6 +1197,7 @@ jobs:
|
||||
-qq \
|
||||
--timeout=9 \
|
||||
--numprocesses 1 \
|
||||
--snapshot-details \
|
||||
${cov_params[@]} \
|
||||
-o console_output_style=count \
|
||||
--durations=0 \
|
||||
@@ -1345,6 +1344,7 @@ jobs:
|
||||
-qq \
|
||||
--timeout=9 \
|
||||
--numprocesses auto \
|
||||
--snapshot-details \
|
||||
${cov_params[@]} \
|
||||
-o console_output_style=count \
|
||||
--durations=0 \
|
||||
|
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
|
||||
uses: actions/checkout@v4.2.2
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v3.27.0
|
||||
uses: github/codeql-action/init@v3.27.1
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v3.27.0
|
||||
uses: github/codeql-action/analyze@v3.27.1
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
30
.github/workflows/wheels.yml
vendored
@@ -112,7 +112,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
abi: ["cp312"]
|
||||
abi: ["cp312", "cp313"]
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
@@ -135,14 +135,14 @@ jobs:
|
||||
sed -i "/uv/d" requirements_diff.txt
|
||||
|
||||
- name: Build wheels
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "libffi-dev;openssl-dev;yaml-dev;nasm"
|
||||
apk: "libffi-dev;openssl-dev;yaml-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;multidict;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
@@ -156,7 +156,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
abi: ["cp312"]
|
||||
abi: ["cp312", "cp313"]
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
@@ -198,6 +198,7 @@ jobs:
|
||||
split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
|
||||
|
||||
- name: Create requirements for cython<3
|
||||
if: matrix.abi == 'cp312'
|
||||
run: |
|
||||
# Some dependencies still require 'cython<3'
|
||||
# and don't yet use isolated build environments.
|
||||
@@ -208,7 +209,8 @@ jobs:
|
||||
cat homeassistant/package_constraints.txt | grep 'pydantic==' >> requirements_old-cython.txt
|
||||
|
||||
- name: Build wheels (old cython)
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
if: matrix.abi == 'cp312'
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
@@ -223,43 +225,43 @@ jobs:
|
||||
pip: "'cython<3'"
|
||||
|
||||
- name: Build wheels (part 1)
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtaa"
|
||||
|
||||
- name: Build wheels (part 2)
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtab"
|
||||
|
||||
- name: Build wheels (part 3)
|
||||
uses: home-assistant/wheels@2024.07.1
|
||||
uses: home-assistant/wheels@2024.11.0
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
arch: ${{ matrix.arch }}
|
||||
wheels-key: ${{ secrets.WHEELS_KEY }}
|
||||
env-file: true
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pydantic;pymicro-vad;yarl
|
||||
apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-dev"
|
||||
skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
|
||||
constraints: "homeassistant/package_constraints.txt"
|
||||
requirements-diff: "requirements_diff.txt"
|
||||
requirements: "requirements_all.txtac"
|
||||
|
@@ -1,6 +1,6 @@
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.7.1
    rev: v0.7.2
    hooks:
      - id: ruff
        args:
@@ -324,11 +324,13 @@ homeassistant.components.moon.*
|
||||
homeassistant.components.mopeka.*
|
||||
homeassistant.components.motionmount.*
|
||||
homeassistant.components.mqtt.*
|
||||
homeassistant.components.music_assistant.*
|
||||
homeassistant.components.my.*
|
||||
homeassistant.components.mysensors.*
|
||||
homeassistant.components.myuplink.*
|
||||
homeassistant.components.nam.*
|
||||
homeassistant.components.nanoleaf.*
|
||||
homeassistant.components.nasweb.*
|
||||
homeassistant.components.neato.*
|
||||
homeassistant.components.nest.*
|
||||
homeassistant.components.netatmo.*
|
||||
@@ -338,6 +340,7 @@ homeassistant.components.nfandroidtv.*
|
||||
homeassistant.components.nightscout.*
|
||||
homeassistant.components.nissan_leaf.*
|
||||
homeassistant.components.no_ip.*
|
||||
homeassistant.components.nordpool.*
|
||||
homeassistant.components.notify.*
|
||||
homeassistant.components.notion.*
|
||||
homeassistant.components.number.*
|
||||
|
10
CODEOWNERS
@@ -496,8 +496,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/freebox/ @hacf-fr @Quentame
|
||||
/homeassistant/components/freedompro/ @stefano055415
|
||||
/tests/components/freedompro/ @stefano055415
|
||||
/homeassistant/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
|
||||
/tests/components/fritz/ @mammuth @AaronDavidSchneider @chemelli74 @mib1185
|
||||
/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
|
||||
/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
|
||||
/homeassistant/components/fritzbox/ @mib1185 @flabbamann
|
||||
/tests/components/fritzbox/ @mib1185 @flabbamann
|
||||
/homeassistant/components/fritzbox_callmonitor/ @cdce8p
|
||||
@@ -954,6 +954,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/msteams/ @peroyvind
|
||||
/homeassistant/components/mullvad/ @meichthys
|
||||
/tests/components/mullvad/ @meichthys
|
||||
/homeassistant/components/music_assistant/ @music-assistant
|
||||
/tests/components/music_assistant/ @music-assistant
|
||||
/homeassistant/components/mutesync/ @currentoor
|
||||
/tests/components/mutesync/ @currentoor
|
||||
/homeassistant/components/my/ @home-assistant/core
|
||||
@@ -968,6 +970,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/nam/ @bieniu
|
||||
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
|
||||
/tests/components/nanoleaf/ @milanmeu @joostlek
|
||||
/homeassistant/components/nasweb/ @nasWebio
|
||||
/tests/components/nasweb/ @nasWebio
|
||||
/homeassistant/components/neato/ @Santobert
|
||||
/tests/components/neato/ @Santobert
|
||||
/homeassistant/components/nederlandse_spoorwegen/ @YarmoM
|
||||
@@ -1008,6 +1012,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/noaa_tides/ @jdelaney72
|
||||
/homeassistant/components/nobo_hub/ @echoromeo @oyvindwe
|
||||
/tests/components/nobo_hub/ @echoromeo @oyvindwe
|
||||
/homeassistant/components/nordpool/ @gjohansson-ST
|
||||
/tests/components/nordpool/ @gjohansson-ST
|
||||
/homeassistant/components/notify/ @home-assistant/core
|
||||
/tests/components/notify/ @home-assistant/core
|
||||
/homeassistant/components/notify_events/ @matrozov @papajojo
|
||||
|
@@ -7,12 +7,13 @@ FROM ${BUILD_FROM}
|
||||
# Synchronize with homeassistant/core.py:async_stop
|
||||
ENV \
|
||||
S6_SERVICES_GRACETIME=240000 \
|
||||
UV_SYSTEM_PYTHON=true
|
||||
UV_SYSTEM_PYTHON=true \
|
||||
UV_NO_CACHE=true
|
||||
|
||||
ARG QEMU_CPU
|
||||
|
||||
# Install uv
|
||||
RUN pip3 install uv==0.4.28
|
||||
RUN pip3 install uv==0.5.0
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
|
@@ -9,6 +9,7 @@ import os
|
||||
import sys
|
||||
import threading
|
||||
|
||||
from .backup_restore import restore_backup
|
||||
from .const import REQUIRED_PYTHON_VER, RESTART_EXIT_CODE, __version__
|
||||
|
||||
FAULT_LOG_FILENAME = "home-assistant.log.fault"
|
||||
@@ -182,6 +183,9 @@ def main() -> int:
|
||||
return scripts.run(args.script)
|
||||
|
||||
config_dir = os.path.abspath(os.path.join(os.getcwd(), args.config))
|
||||
if restore_backup(config_dir):
|
||||
return RESTART_EXIT_CODE
|
||||
|
||||
ensure_config_path(config_dir)
|
||||
|
||||
# pylint: disable-next=import-outside-toplevel
|
||||
|
126
homeassistant/backup_restore.py
Normal file
@@ -0,0 +1,126 @@
|
||||
"""Home Assistant module to handle restoring backups."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
import sys
|
||||
from tempfile import TemporaryDirectory
|
||||
|
||||
from awesomeversion import AwesomeVersion
|
||||
import securetar
|
||||
|
||||
from .const import __version__ as HA_VERSION
|
||||
|
||||
RESTORE_BACKUP_FILE = ".HA_RESTORE"
|
||||
KEEP_PATHS = ("backups",)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class RestoreBackupFileContent:
|
||||
"""Definition for restore backup file content."""
|
||||
|
||||
backup_file_path: Path
|
||||
|
||||
|
||||
def restore_backup_file_content(config_dir: Path) -> RestoreBackupFileContent | None:
|
||||
"""Return the contents of the restore backup file."""
|
||||
instruction_path = config_dir.joinpath(RESTORE_BACKUP_FILE)
|
||||
try:
|
||||
instruction_content = json.loads(instruction_path.read_text(encoding="utf-8"))
|
||||
return RestoreBackupFileContent(
|
||||
backup_file_path=Path(instruction_content["path"])
|
||||
)
|
||||
except (FileNotFoundError, json.JSONDecodeError):
|
||||
return None
|
||||
|
||||
|
||||
def _clear_configuration_directory(config_dir: Path) -> None:
|
||||
"""Delete all files and directories in the config directory except for the backups directory."""
|
||||
keep_paths = [config_dir.joinpath(path) for path in KEEP_PATHS]
|
||||
config_contents = sorted(
|
||||
[entry for entry in config_dir.iterdir() if entry not in keep_paths]
|
||||
)
|
||||
|
||||
for entry in config_contents:
|
||||
entrypath = config_dir.joinpath(entry)
|
||||
|
||||
if entrypath.is_file():
|
||||
entrypath.unlink()
|
||||
elif entrypath.is_dir():
|
||||
shutil.rmtree(entrypath)
|
||||
|
||||
|
||||
def _extract_backup(config_dir: Path, backup_file_path: Path) -> None:
|
||||
"""Extract the backup file to the config directory."""
|
||||
with (
|
||||
TemporaryDirectory() as tempdir,
|
||||
securetar.SecureTarFile(
|
||||
backup_file_path,
|
||||
gzip=False,
|
||||
mode="r",
|
||||
) as ostf,
|
||||
):
|
||||
ostf.extractall(
|
||||
path=Path(tempdir, "extracted"),
|
||||
members=securetar.secure_path(ostf),
|
||||
filter="fully_trusted",
|
||||
)
|
||||
backup_meta_file = Path(tempdir, "extracted", "backup.json")
|
||||
backup_meta = json.loads(backup_meta_file.read_text(encoding="utf8"))
|
||||
|
||||
if (
|
||||
backup_meta_version := AwesomeVersion(
|
||||
backup_meta["homeassistant"]["version"]
|
||||
)
|
||||
) > HA_VERSION:
|
||||
raise ValueError(
|
||||
f"You need at least Home Assistant version {backup_meta_version} to restore this backup"
|
||||
)
|
||||
|
||||
with securetar.SecureTarFile(
|
||||
Path(
|
||||
tempdir,
|
||||
"extracted",
|
||||
f"homeassistant.tar{'.gz' if backup_meta["compressed"] else ''}",
|
||||
),
|
||||
gzip=backup_meta["compressed"],
|
||||
mode="r",
|
||||
) as istf:
|
||||
for member in istf.getmembers():
|
||||
if member.name == "data":
|
||||
continue
|
||||
member.name = member.name.replace("data/", "")
|
||||
_clear_configuration_directory(config_dir)
|
||||
istf.extractall(
|
||||
path=config_dir,
|
||||
members=[
|
||||
member
|
||||
for member in securetar.secure_path(istf)
|
||||
if member.name != "data"
|
||||
],
|
||||
filter="fully_trusted",
|
||||
)
|
||||
|
||||
|
||||
def restore_backup(config_dir_path: str) -> bool:
|
||||
"""Restore the backup file if any.
|
||||
|
||||
Returns True if a restore backup file was found and restored, False otherwise.
|
||||
"""
|
||||
config_dir = Path(config_dir_path)
|
||||
if not (restore_content := restore_backup_file_content(config_dir)):
|
||||
return False
|
||||
|
||||
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
|
||||
backup_file_path = restore_content.backup_file_path
|
||||
_LOGGER.info("Restoring %s", backup_file_path)
|
||||
try:
|
||||
_extract_backup(config_dir, backup_file_path)
|
||||
except FileNotFoundError as err:
|
||||
raise ValueError(f"Backup file {backup_file_path} does not exist") from err
|
||||
_LOGGER.info("Restore complete, restarting")
|
||||
return True
|
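The new `homeassistant/backup_restore.py` module above is driven entirely by a small JSON instruction file, `.HA_RESTORE`, in the configuration directory: `restore_backup()` reads the file, extracts the referenced backup tar into the config directory, and returns `True` so `__main__.py` can exit with `RESTART_EXIT_CODE`. A minimal sketch of that trigger path follows; the `/config` path and backup filename are hypothetical placeholders, not values from this changeset.

```python
# Sketch only: shows how the .HA_RESTORE instruction file drives
# homeassistant.backup_restore.restore_backup(). The /config path and the
# backup filename are hypothetical placeholders.
import json
from pathlib import Path

from homeassistant.backup_restore import RESTORE_BACKUP_FILE, restore_backup

config_dir = Path("/config")  # assumed Home Assistant configuration directory

# A caller (the backup integration in this changeset) writes the instruction
# file, pointing at the backup tar to restore on the next startup.
(config_dir / RESTORE_BACKUP_FILE).write_text(
    json.dumps({"path": str(config_dir / "backups" / "abc123.tar")}),
    encoding="utf-8",
)

# On startup, __main__.py calls restore_backup(); True means a backup was
# found and extracted, and Home Assistant exits with RESTART_EXIT_CODE.
if restore_backup(str(config_dir)):
    raise SystemExit("restart required after restore")
```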
@@ -1,6 +1,5 @@
|
||||
"""The AEMET OpenData component."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from aemet_opendata.exceptions import AemetError, TownNotFound
|
||||
@@ -13,20 +12,10 @@ from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
|
||||
from .const import CONF_STATION_UPDATES, PLATFORMS
|
||||
from .coordinator import WeatherUpdateCoordinator
|
||||
from .coordinator import AemetConfigEntry, AemetData, WeatherUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type AemetConfigEntry = ConfigEntry[AemetData]
|
||||
|
||||
|
||||
@dataclass
|
||||
class AemetData:
|
||||
"""Aemet runtime data."""
|
||||
|
||||
name: str
|
||||
coordinator: WeatherUpdateCoordinator
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> bool:
|
||||
"""Set up AEMET OpenData as config entry."""
|
||||
@@ -46,7 +35,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AemetConfigEntry) -> bool:
|
||||
except AemetError as err:
|
||||
raise ConfigEntryNotReady(err) from err
|
||||
|
||||
weather_coordinator = WeatherUpdateCoordinator(hass, aemet)
|
||||
weather_coordinator = WeatherUpdateCoordinator(hass, entry, aemet)
|
||||
await weather_coordinator.async_config_entry_first_refresh()
|
||||
|
||||
entry.runtime_data = AemetData(name=name, coordinator=weather_coordinator)
|
||||
|
@@ -3,6 +3,7 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from asyncio import timeout
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any, Final, cast
|
||||
@@ -19,6 +20,7 @@ from aemet_opendata.helpers import dict_nested_value
|
||||
from aemet_opendata.interface import AEMET
|
||||
|
||||
from homeassistant.components.weather import Forecast
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
@@ -29,6 +31,16 @@ _LOGGER = logging.getLogger(__name__)
|
||||
API_TIMEOUT: Final[int] = 120
|
||||
WEATHER_UPDATE_INTERVAL = timedelta(minutes=10)
|
||||
|
||||
type AemetConfigEntry = ConfigEntry[AemetData]
|
||||
|
||||
|
||||
@dataclass
|
||||
class AemetData:
|
||||
"""Aemet runtime data."""
|
||||
|
||||
name: str
|
||||
coordinator: WeatherUpdateCoordinator
|
||||
|
||||
|
||||
class WeatherUpdateCoordinator(DataUpdateCoordinator):
|
||||
"""Weather data update coordinator."""
|
||||
@@ -36,6 +48,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: AemetConfigEntry,
|
||||
aemet: AEMET,
|
||||
) -> None:
|
||||
"""Initialize coordinator."""
|
||||
@@ -44,6 +57,7 @@ class WeatherUpdateCoordinator(DataUpdateCoordinator):
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=DOMAIN,
|
||||
update_interval=WEATHER_UPDATE_INTERVAL,
|
||||
)
|
||||
|
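The AEMET hunks above move the `AemetConfigEntry` alias and the `AemetData` runtime-data container into `coordinator.py` and pass the config entry to the coordinator. A hedged sketch of how a platform consumes that typed runtime data (the platform body is simplified, not the integration's real code):

```python
# Simplified sketch of the typed runtime_data pattern used by the AEMET hunks
# above; only AemetConfigEntry, AemetData and WeatherUpdateCoordinator are
# names from the diff, the rest is illustrative.
from homeassistant.components.aemet.coordinator import (
    AemetConfigEntry,  # type AemetConfigEntry = ConfigEntry[AemetData]
    WeatherUpdateCoordinator,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback


async def async_setup_entry(
    hass: HomeAssistant,
    entry: AemetConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Platform setup: read the shared coordinator from the entry's runtime data."""
    coordinator: WeatherUpdateCoordinator = entry.runtime_data.coordinator
    name: str = entry.runtime_data.name
    # ...build weather entities around `coordinator` and `name`...
```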
@@ -15,7 +15,7 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from . import AemetConfigEntry
|
||||
from .coordinator import AemetConfigEntry
|
||||
|
||||
TO_REDACT_CONFIG = [
|
||||
CONF_API_KEY,
|
||||
|
@@ -55,7 +55,6 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from . import AemetConfigEntry
|
||||
from .const import (
|
||||
ATTR_API_CONDITION,
|
||||
ATTR_API_FORECAST_CONDITION,
|
||||
@@ -87,7 +86,7 @@ from .const import (
|
||||
ATTR_API_WIND_SPEED,
|
||||
CONDITIONS_MAP,
|
||||
)
|
||||
from .coordinator import WeatherUpdateCoordinator
|
||||
from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
|
||||
from .entity import AemetEntity
|
||||
|
||||
|
||||
|
@@ -27,9 +27,8 @@ from homeassistant.const import (
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
from . import AemetConfigEntry
|
||||
from .const import CONDITIONS_MAP
|
||||
from .coordinator import WeatherUpdateCoordinator
|
||||
from .coordinator import AemetConfigEntry, WeatherUpdateCoordinator
|
||||
from .entity import AemetEntity
|
||||
|
||||
|
||||
|
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/agent_dvr",
  "iot_class": "local_polling",
  "loggers": ["agent"],
  "requirements": ["agent-py==0.0.23"]
  "requirements": ["agent-py==0.0.24"]
}
@@ -1,5 +1,7 @@
|
||||
"""Config flow for AirNow integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
@@ -12,7 +14,6 @@ from homeassistant.config_entries import (
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
OptionsFlowWithConfigEntry,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -120,12 +121,12 @@ class AirNowConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlow:
|
||||
) -> AirNowOptionsFlowHandler:
|
||||
"""Return the options flow."""
|
||||
return AirNowOptionsFlowHandler(config_entry)
|
||||
return AirNowOptionsFlowHandler()
|
||||
|
||||
|
||||
class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class AirNowOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle an options flow for AirNow."""
|
||||
|
||||
async def async_step_init(
|
||||
@@ -136,12 +137,7 @@ class AirNowOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
return self.async_create_entry(data=user_input)
|
||||
|
||||
options_schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(CONF_RADIUS): vol.All(
|
||||
int,
|
||||
vol.Range(min=5),
|
||||
),
|
||||
}
|
||||
{vol.Optional(CONF_RADIUS): vol.All(int, vol.Range(min=5))}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
|
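Several hunks in this compare (AirNow above; analytics_insights, androidtv, androidtv_remote and axis below) apply the same migration: options-flow handlers drop the `OptionsFlowWithConfigEntry` base class, are constructed without arguments, and read stored options through `self.config_entry.options`, which the flow manager attaches. A hedged before/after sketch with a placeholder handler and option key:

```python
# Generic sketch of the OptionsFlowWithConfigEntry -> OptionsFlow migration;
# the handler name and the "radius" option key are placeholders, not taken
# from any single integration in this diff.
from typing import Any

import voluptuous as vol

from homeassistant.config_entries import ConfigFlowResult, OptionsFlow


class ExampleOptionsFlowHandler(OptionsFlow):
    """Options flow handler that no longer receives the entry in __init__."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Manage the options."""
        if user_input is not None:
            return self.async_create_entry(data=user_input)

        # The flow manager attaches the entry, so stored options are read from
        # it instead of the removed self.options attribute.
        current = self.config_entry.options
        schema = vol.Schema(
            {vol.Optional("radius", default=current.get("radius", 5)): int}
        )
        return self.async_show_form(step_id="init", data_schema=schema)


# In the matching ConfigFlow, async_get_options_flow() now returns
# ExampleOptionsFlowHandler() with no arguments.
```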
@@ -16,7 +16,6 @@ from homeassistant.config_entries import (
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
OptionsFlowWithConfigEntry,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
@@ -46,9 +45,11 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> HomeassistantAnalyticsOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return HomeassistantAnalyticsOptionsFlowHandler(config_entry)
|
||||
return HomeassistantAnalyticsOptionsFlowHandler()
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -132,7 +133,7 @@ class HomeassistantAnalyticsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
|
||||
class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle Homeassistant Analytics options."""
|
||||
|
||||
async def async_step_init(
|
||||
@@ -211,6 +212,6 @@ class HomeassistantAnalyticsOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
),
|
||||
},
|
||||
),
|
||||
self.options,
|
||||
self.config_entry.options,
|
||||
),
|
||||
)
|
||||
|
@@ -13,7 +13,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithConfigEntry,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_DEVICE_CLASS, CONF_HOST, CONF_PORT
|
||||
from homeassistant.core import callback
|
||||
@@ -186,16 +186,14 @@ class AndroidTVFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
return OptionsFlowHandler(config_entry)
|
||||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class OptionsFlowHandler(OptionsFlow):
|
||||
"""Handle an option flow for Android Debug Bridge."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
super().__init__(config_entry)
|
||||
|
||||
self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {})
|
||||
self._state_det_rules: dict[str, Any] = self.options.setdefault(
|
||||
CONF_STATE_DETECTION_RULES, {}
|
||||
self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {}))
|
||||
self._state_det_rules: dict[str, Any] = dict(
|
||||
config_entry.options.get(CONF_STATE_DETECTION_RULES, {})
|
||||
)
|
||||
self._conf_app_id: str | None = None
|
||||
self._conf_rule_id: str | None = None
|
||||
@@ -237,7 +235,7 @@ class OptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
SelectOptionDict(value=k, label=v) for k, v in apps_list.items()
|
||||
]
|
||||
rules = [RULES_NEW_ID, *self._state_det_rules]
|
||||
options = self.options
|
||||
options = self.config_entry.options
|
||||
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
|
@@ -20,7 +20,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithConfigEntry,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_NAME
|
||||
from homeassistant.core import callback
|
||||
@@ -221,13 +221,12 @@ class AndroidTVRemoteConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return AndroidTVRemoteOptionsFlowHandler(config_entry)
|
||||
|
||||
|
||||
class AndroidTVRemoteOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class AndroidTVRemoteOptionsFlowHandler(OptionsFlow):
|
||||
"""Android TV Remote options flow."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
super().__init__(config_entry)
|
||||
self._apps: dict[str, Any] = self.options.setdefault(CONF_APPS, {})
|
||||
self._apps: dict[str, Any] = dict(config_entry.options.get(CONF_APPS, {}))
|
||||
self._conf_app_id: str | None = None
|
||||
|
||||
@callback
|
||||
|
@@ -121,7 +121,6 @@ class AnthropicOptionsFlow(OptionsFlow):
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.config_entry = config_entry
|
||||
self.last_rendered_recommended = config_entry.options.get(
|
||||
CONF_RECOMMENDED, False
|
||||
)
|
||||
|
@@ -22,8 +22,8 @@ class EnhancedAudioChunk:
|
||||
timestamp_ms: int
|
||||
"""Timestamp relative to start of audio stream (milliseconds)"""
|
||||
|
||||
is_speech: bool | None
|
||||
"""True if audio chunk likely contains speech, False if not, None if unknown"""
|
||||
speech_probability: float | None
|
||||
"""Probability that audio chunk contains speech (0-1), None if unknown"""
|
||||
|
||||
|
||||
class AudioEnhancer(ABC):
|
||||
@@ -70,27 +70,27 @@ class MicroVadSpeexEnhancer(AudioEnhancer):
|
||||
)
|
||||
|
||||
self.vad: MicroVad | None = None
|
||||
self.threshold = 0.5
|
||||
|
||||
if self.is_vad_enabled:
|
||||
self.vad = MicroVad()
|
||||
_LOGGER.debug("Initialized microVAD with threshold=%s", self.threshold)
|
||||
_LOGGER.debug("Initialized microVAD")
|
||||
|
||||
def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
|
||||
"""Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
|
||||
is_speech: bool | None = None
|
||||
speech_probability: float | None = None
|
||||
|
||||
assert len(audio) == BYTES_PER_CHUNK
|
||||
|
||||
if self.vad is not None:
|
||||
# Run VAD
|
||||
speech_prob = self.vad.Process10ms(audio)
|
||||
is_speech = speech_prob > self.threshold
|
||||
speech_probability = self.vad.Process10ms(audio)
|
||||
|
||||
if self.audio_processor is not None:
|
||||
# Run noise suppression and auto gain
|
||||
audio = self.audio_processor.Process10ms(audio).audio
|
||||
|
||||
return EnhancedAudioChunk(
|
||||
audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech
|
||||
audio=audio,
|
||||
timestamp_ms=timestamp_ms,
|
||||
speech_probability=speech_probability,
|
||||
)
|
||||
|
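The audio-enhancer hunks above replace the pre-thresholded `is_speech` flag with the raw `speech_probability` reported by microVAD, leaving thresholding to the consumers. A small sketch of the resulting chunk shape and a downstream threshold check; the dataclass mirrors the fields in the diff, while the sample values are invented:

```python
# Sketch mirroring the EnhancedAudioChunk fields from the diff; the sample
# values below are illustrative, not copied from the source.
from dataclasses import dataclass


@dataclass
class EnhancedAudioChunk:
    """Enhanced 10 ms audio chunk plus metadata."""

    audio: bytes
    timestamp_ms: int
    speech_probability: float | None  # None when no VAD was run


chunk = EnhancedAudioChunk(
    audio=b"\x00" * 320,  # 10 ms of 16 kHz, 16-bit mono PCM
    timestamp_ms=10,
    speech_probability=0.72,
)

# Consumers now apply their own threshold instead of trusting a boolean.
is_speech = chunk.speech_probability is not None and chunk.speech_probability > 0.5
```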
@@ -780,7 +780,9 @@ class PipelineRun:
|
||||
# speaking the voice command.
|
||||
audio_chunks_for_stt.extend(
|
||||
EnhancedAudioChunk(
|
||||
audio=chunk_ts[0], timestamp_ms=chunk_ts[1], is_speech=False
|
||||
audio=chunk_ts[0],
|
||||
timestamp_ms=chunk_ts[1],
|
||||
speech_probability=None,
|
||||
)
|
||||
for chunk_ts in result.queued_audio
|
||||
)
|
||||
@@ -827,7 +829,7 @@ class PipelineRun:
|
||||
|
||||
if wake_word_vad is not None:
|
||||
chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate
|
||||
if not wake_word_vad.process(chunk_seconds, chunk.is_speech):
|
||||
if not wake_word_vad.process(chunk_seconds, chunk.speech_probability):
|
||||
raise WakeWordTimeoutError(
|
||||
code="wake-word-timeout", message="Wake word was not detected"
|
||||
)
|
||||
@@ -955,7 +957,7 @@ class PipelineRun:
|
||||
|
||||
if stt_vad is not None:
|
||||
chunk_seconds = (len(chunk.audio) // sample_width) / sample_rate
|
||||
if not stt_vad.process(chunk_seconds, chunk.is_speech):
|
||||
if not stt_vad.process(chunk_seconds, chunk.speech_probability):
|
||||
# Silence detected at the end of voice command
|
||||
self.process_event(
|
||||
PipelineEvent(
|
||||
@@ -1221,7 +1223,7 @@ class PipelineRun:
|
||||
yield EnhancedAudioChunk(
|
||||
audio=sub_chunk,
|
||||
timestamp_ms=timestamp_ms,
|
||||
is_speech=None, # no VAD
|
||||
speech_probability=None, # no VAD
|
||||
)
|
||||
timestamp_ms += MS_PER_CHUNK
|
||||
|
||||
|
@@ -75,7 +75,7 @@ class AudioBuffer:
|
||||
class VoiceCommandSegmenter:
|
||||
"""Segments an audio stream into voice commands."""
|
||||
|
||||
speech_seconds: float = 0.3
|
||||
speech_seconds: float = 0.1
|
||||
"""Seconds of speech before voice command has started."""
|
||||
|
||||
command_seconds: float = 1.0
|
||||
@@ -96,6 +96,12 @@ class VoiceCommandSegmenter:
|
||||
timed_out: bool = False
|
||||
"""True a timeout occurred during voice command."""
|
||||
|
||||
before_command_speech_threshold: float = 0.2
|
||||
"""Probability threshold for speech before voice command."""
|
||||
|
||||
in_command_speech_threshold: float = 0.5
|
||||
"""Probability threshold for speech during voice command."""
|
||||
|
||||
_speech_seconds_left: float = 0.0
|
||||
"""Seconds left before considering voice command as started."""
|
||||
|
||||
@@ -124,7 +130,7 @@ class VoiceCommandSegmenter:
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
self.in_command = False
|
||||
|
||||
def process(self, chunk_seconds: float, is_speech: bool | None) -> bool:
|
||||
def process(self, chunk_seconds: float, speech_probability: float | None) -> bool:
|
||||
"""Process samples using external VAD.
|
||||
|
||||
Returns False when command is done.
|
||||
@@ -142,7 +148,12 @@ class VoiceCommandSegmenter:
|
||||
self.timed_out = True
|
||||
return False
|
||||
|
||||
if speech_probability is None:
|
||||
speech_probability = 0.0
|
||||
|
||||
if not self.in_command:
|
||||
# Before command
|
||||
is_speech = speech_probability > self.before_command_speech_threshold
|
||||
if is_speech:
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
self._speech_seconds_left -= chunk_seconds
|
||||
@@ -160,24 +171,29 @@ class VoiceCommandSegmenter:
|
||||
if self._reset_seconds_left <= 0:
|
||||
self._speech_seconds_left = self.speech_seconds
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
elif not is_speech:
|
||||
# Silence in command
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
self._silence_seconds_left -= chunk_seconds
|
||||
self._command_seconds_left -= chunk_seconds
|
||||
if (self._silence_seconds_left <= 0) and (self._command_seconds_left <= 0):
|
||||
# Command finished successfully
|
||||
self.reset()
|
||||
_LOGGER.debug("Voice command finished")
|
||||
return False
|
||||
else:
|
||||
# Speech in command.
|
||||
# Reset silence counter if enough speech.
|
||||
self._reset_seconds_left -= chunk_seconds
|
||||
self._command_seconds_left -= chunk_seconds
|
||||
if self._reset_seconds_left <= 0:
|
||||
self._silence_seconds_left = self.silence_seconds
|
||||
# In command
|
||||
is_speech = speech_probability > self.in_command_speech_threshold
|
||||
if not is_speech:
|
||||
# Silence in command
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
self._silence_seconds_left -= chunk_seconds
|
||||
self._command_seconds_left -= chunk_seconds
|
||||
if (self._silence_seconds_left <= 0) and (
|
||||
self._command_seconds_left <= 0
|
||||
):
|
||||
# Command finished successfully
|
||||
self.reset()
|
||||
_LOGGER.debug("Voice command finished")
|
||||
return False
|
||||
else:
|
||||
# Speech in command.
|
||||
# Reset silence counter if enough speech.
|
||||
self._reset_seconds_left -= chunk_seconds
|
||||
self._command_seconds_left -= chunk_seconds
|
||||
if self._reset_seconds_left <= 0:
|
||||
self._silence_seconds_left = self.silence_seconds
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
|
||||
return True
|
||||
|
||||
@@ -226,6 +242,9 @@ class VoiceActivityTimeout:
|
||||
reset_seconds: float = 0.5
|
||||
"""Seconds of speech before resetting timeout."""
|
||||
|
||||
speech_threshold: float = 0.5
|
||||
"""Threshold for speech."""
|
||||
|
||||
_silence_seconds_left: float = 0.0
|
||||
"""Seconds left before considering voice command as stopped."""
|
||||
|
||||
@@ -241,12 +260,15 @@ class VoiceActivityTimeout:
|
||||
self._silence_seconds_left = self.silence_seconds
|
||||
self._reset_seconds_left = self.reset_seconds
|
||||
|
||||
def process(self, chunk_seconds: float, is_speech: bool | None) -> bool:
|
||||
def process(self, chunk_seconds: float, speech_probability: float | None) -> bool:
|
||||
"""Process samples using external VAD.
|
||||
|
||||
Returns False when timeout is reached.
|
||||
"""
|
||||
if is_speech:
|
||||
if speech_probability is None:
|
||||
speech_probability = 0.0
|
||||
|
||||
if speech_probability > self.speech_threshold:
|
||||
# Speech
|
||||
self._reset_seconds_left -= chunk_seconds
|
||||
if self._reset_seconds_left <= 0:
|
||||
|
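With the change above, `VoiceCommandSegmenter.process()` consumes the raw probability and applies its own thresholds (0.2 before the command starts, 0.5 while inside it). A hedged sketch of the driving loop, assuming a Home Assistant development environment; the probability sequence is invented:

```python
# Hedged sketch: feeding per-chunk speech probabilities (one value per 10 ms
# of audio) into the segmenter. The probability sequence is made up; the
# thresholds (0.2 / 0.5) are the defaults added in the diff above.
from homeassistant.components.assist_pipeline.vad import VoiceCommandSegmenter

segmenter = VoiceCommandSegmenter()
chunk_seconds = 0.01  # each VAD result covers 10 ms of audio

# ~0.2 s of silence, ~0.8 s of speech, then ~1.5 s of trailing silence.
speech_probabilities = [0.05] * 20 + [0.9] * 80 + [0.05] * 150

for probability in speech_probabilities:
    if not segmenter.process(chunk_seconds, probability):
        # process() returns False once enough silence follows the command.
        break
```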
@@ -18,7 +18,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithConfigEntry,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
@@ -59,9 +59,11 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> AxisOptionsFlowHandler:
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> AxisOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return AxisOptionsFlowHandler(config_entry)
|
||||
return AxisOptionsFlowHandler()
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the Axis config flow."""
|
||||
@@ -264,7 +266,7 @@ class AxisFlowHandler(ConfigFlow, domain=AXIS_DOMAIN):
|
||||
return await self.async_step_user()
|
||||
|
||||
|
||||
class AxisOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class AxisOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle Axis device options."""
|
||||
|
||||
config_entry: AxisConfigEntry
|
||||
@@ -282,8 +284,7 @@ class AxisOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the Axis device stream options."""
|
||||
if user_input is not None:
|
||||
self.options.update(user_input)
|
||||
return self.async_create_entry(title="", data=self.options)
|
||||
return self.async_create_entry(data=self.config_entry.options | user_input)
|
||||
|
||||
schema = {}
|
||||
|
||||
|
@@ -17,6 +17,7 @@ LOGGER = getLogger(__package__)
|
||||
EXCLUDE_FROM_BACKUP = [
|
||||
"__pycache__/*",
|
||||
".DS_Store",
|
||||
".HA_RESTORE",
|
||||
"*.db-shm",
|
||||
"*.log.*",
|
||||
"*.log",
|
||||
|
@@ -16,6 +16,7 @@ from typing import Any, Protocol, cast
|
||||
|
||||
from securetar import SecureTarFile, atomic_contents_add
|
||||
|
||||
from homeassistant.backup_restore import RESTORE_BACKUP_FILE
|
||||
from homeassistant.const import __version__ as HAVERSION
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
@@ -123,6 +124,10 @@ class BaseBackupManager(abc.ABC):
|
||||
LOGGER.debug("Loaded %s platforms", len(self.platforms))
|
||||
self.loaded_platforms = True
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_restore_backup(self, slug: str, **kwargs: Any) -> None:
|
||||
"""Restore a backup."""
|
||||
|
||||
@abc.abstractmethod
|
||||
async def async_create_backup(self, **kwargs: Any) -> Backup:
|
||||
"""Generate a backup."""
|
||||
@@ -291,6 +296,25 @@ class BackupManager(BaseBackupManager):
|
||||
|
||||
return tar_file_path.stat().st_size
|
||||
|
||||
async def async_restore_backup(self, slug: str, **kwargs: Any) -> None:
|
||||
"""Restore a backup.
|
||||
|
||||
This will write the restore information to .HA_RESTORE which
|
||||
will be handled during startup by the restore_backup module.
|
||||
"""
|
||||
if (backup := await self.async_get_backup(slug=slug)) is None:
|
||||
raise HomeAssistantError(f"Backup {slug} not found")
|
||||
|
||||
def _write_restore_file() -> None:
|
||||
"""Write the restore file."""
|
||||
Path(self.hass.config.path(RESTORE_BACKUP_FILE)).write_text(
|
||||
json.dumps({"path": backup.path.as_posix()}),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
await self.hass.async_add_executor_job(_write_restore_file)
|
||||
await self.hass.services.async_call("homeassistant", "restart", {})
|
||||
|
||||
|
||||
def _generate_slug(date: str, name: str) -> str:
|
||||
"""Generate a backup slug."""
|
||||
|
@@ -22,6 +22,7 @@ def async_register_websocket_handlers(hass: HomeAssistant, with_hassio: bool) ->
|
||||
websocket_api.async_register_command(hass, handle_info)
|
||||
websocket_api.async_register_command(hass, handle_create)
|
||||
websocket_api.async_register_command(hass, handle_remove)
|
||||
websocket_api.async_register_command(hass, handle_restore)
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@@ -85,6 +86,24 @@ async def handle_remove(
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "backup/restore",
|
||||
vol.Required("slug"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
async def handle_restore(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Restore a backup."""
|
||||
await hass.data[DATA_MANAGER].async_restore_backup(msg["slug"])
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command({vol.Required("type"): "backup/generate"})
|
||||
@websocket_api.async_response
|
||||
|
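The websocket hunk above registers a new `backup/restore` command that takes the backup slug and hands it to the manager. A hedged example of issuing it from a client object in the style of Home Assistant's websocket test fixture; the client type, message id and slug are placeholders:

```python
# Hedged example of calling the new backup/restore WebSocket command. `client`
# is assumed to expose send_json()/receive_json() like HA's hass_ws_client
# test fixture; the message id and slug are placeholders.
from typing import Any


async def restore_backup_via_websocket(client: Any, slug: str) -> None:
    """Ask Home Assistant to restore the backup identified by `slug`."""
    await client.send_json({"id": 1, "type": "backup/restore", "slug": slug})
    response = await client.receive_json()
    if not response.get("success"):
        raise RuntimeError(f"backup/restore failed: {response}")
    # On success the manager writes .HA_RESTORE and schedules a restart.
```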
@@ -17,46 +17,9 @@ from homeassistant.components.media_player import (
|
||||
class BangOlufsenSource:
|
||||
"""Class used for associating device source ids with friendly names. May not include all sources."""
|
||||
|
||||
URI_STREAMER: Final[Source] = Source(
|
||||
name="Audio Streamer",
|
||||
id="uriStreamer",
|
||||
is_seekable=False,
|
||||
)
|
||||
BLUETOOTH: Final[Source] = Source(
|
||||
name="Bluetooth",
|
||||
id="bluetooth",
|
||||
is_seekable=False,
|
||||
)
|
||||
CHROMECAST: Final[Source] = Source(
|
||||
name="Chromecast built-in",
|
||||
id="chromeCast",
|
||||
is_seekable=False,
|
||||
)
|
||||
LINE_IN: Final[Source] = Source(
|
||||
name="Line-In",
|
||||
id="lineIn",
|
||||
is_seekable=False,
|
||||
)
|
||||
SPDIF: Final[Source] = Source(
|
||||
name="Optical",
|
||||
id="spdif",
|
||||
is_seekable=False,
|
||||
)
|
||||
NET_RADIO: Final[Source] = Source(
|
||||
name="B&O Radio",
|
||||
id="netRadio",
|
||||
is_seekable=False,
|
||||
)
|
||||
DEEZER: Final[Source] = Source(
|
||||
name="Deezer",
|
||||
id="deezer",
|
||||
is_seekable=True,
|
||||
)
|
||||
TIDAL: Final[Source] = Source(
|
||||
name="Tidal",
|
||||
id="tidal",
|
||||
is_seekable=True,
|
||||
)
|
||||
LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn")
|
||||
SPDIF: Final[Source] = Source(name="Optical", id="spdif")
|
||||
URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer")
|
||||
|
||||
|
||||
BANG_OLUFSEN_STATES: dict[str, MediaPlayerState] = {
|
||||
@@ -170,20 +133,6 @@ VALID_MEDIA_TYPES: Final[tuple] = (
|
||||
MediaType.CHANNEL,
|
||||
)
|
||||
|
||||
# Sources on the device that should not be selectable by the user
|
||||
HIDDEN_SOURCE_IDS: Final[tuple] = (
|
||||
"airPlay",
|
||||
"bluetooth",
|
||||
"chromeCast",
|
||||
"generator",
|
||||
"local",
|
||||
"dlna",
|
||||
"qplay",
|
||||
"wpl",
|
||||
"pl",
|
||||
"beolink",
|
||||
"usbIn",
|
||||
)
|
||||
|
||||
# Fallback sources to use in case of API failure.
|
||||
FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
@@ -191,7 +140,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
Source(
|
||||
id="uriStreamer",
|
||||
is_enabled=True,
|
||||
is_playable=False,
|
||||
is_playable=True,
|
||||
name="Audio Streamer",
|
||||
type=SourceTypeEnum(value="uriStreamer"),
|
||||
is_seekable=False,
|
||||
@@ -199,7 +148,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
Source(
|
||||
id="bluetooth",
|
||||
is_enabled=True,
|
||||
is_playable=False,
|
||||
is_playable=True,
|
||||
name="Bluetooth",
|
||||
type=SourceTypeEnum(value="bluetooth"),
|
||||
is_seekable=False,
|
||||
@@ -207,7 +156,7 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
|
||||
Source(
|
||||
id="spotify",
|
||||
is_enabled=True,
|
||||
is_playable=False,
|
||||
is_playable=True,
|
||||
name="Spotify Connect",
|
||||
type=SourceTypeEnum(value="spotify"),
|
||||
is_seekable=True,
|
||||
|
9
homeassistant/components/bang_olufsen/icons.json
Normal file
@@ -0,0 +1,9 @@
{
  "services": {
    "beolink_join": { "service": "mdi:location-enter" },
    "beolink_expand": { "service": "mdi:location-enter" },
    "beolink_unexpand": { "service": "mdi:location-exit" },
    "beolink_leave": { "service": "mdi:close-circle-outline" },
    "beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" }
  }
}
@@ -11,7 +11,7 @@ from typing import TYPE_CHECKING, Any, cast

from aiohttp import ClientConnectorError
from mozart_api import __version__ as MOZART_API_VERSION
from mozart_api.exceptions import ApiException
from mozart_api.exceptions import ApiException, NotFoundException
from mozart_api.models import (
    Action,
    Art,

@@ -38,6 +38,7 @@ from mozart_api.models import (
    VolumeState,
)
from mozart_api.mozart_client import MozartClient, get_highest_resolution_artwork
import voluptuous as vol

from homeassistant.components import media_source
from homeassistant.components.media_player import (

@@ -55,10 +56,17 @@ from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_MODEL, Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers import (
    config_validation as cv,
    device_registry as dr,
    entity_registry as er,
)
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.entity_platform import (
    AddEntitiesCallback,
    async_get_current_platform,
)
from homeassistant.util.dt import utcnow

from . import BangOlufsenConfigEntry

@@ -70,7 +78,6 @@ from .const import (
    CONNECTION_STATUS,
    DOMAIN,
    FALLBACK_SOURCES,
    HIDDEN_SOURCE_IDS,
    VALID_MEDIA_TYPES,
    BangOlufsenMediaType,
    BangOlufsenSource,
@@ -117,6 +124,58 @@ async def async_setup_entry(
        ]
    )

    # Register actions.
    platform = async_get_current_platform()

    jid_regex = vol.Match(
        r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$"
    )

    platform.async_register_entity_service(
        name="beolink_join",
        schema={vol.Optional("beolink_jid"): jid_regex},
        func="async_beolink_join",
    )

    platform.async_register_entity_service(
        name="beolink_expand",
        schema={
            vol.Exclusive("all_discovered", "devices", ""): cv.boolean,
            vol.Exclusive(
                "beolink_jids",
                "devices",
                "Define either specific Beolink JIDs or all discovered",
            ): vol.All(
                cv.ensure_list,
                [jid_regex],
            ),
        },
        func="async_beolink_expand",
    )

    platform.async_register_entity_service(
        name="beolink_unexpand",
        schema={
            vol.Required("beolink_jids"): vol.All(
                cv.ensure_list,
                [jid_regex],
            ),
        },
        func="async_beolink_unexpand",
    )

    platform.async_register_entity_service(
        name="beolink_leave",
        schema=None,
        func="async_beolink_leave",
    )

    platform.async_register_entity_service(
        name="beolink_allstandby",
        schema=None,
        func="async_beolink_allstandby",
    )


class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
    """Representation of a media player."""
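
For context, a minimal sketch of how one of these registered actions could be invoked from Python inside Home Assistant; the entity_id and JID below are hypothetical placeholders, not values from this changeset:

    await hass.services.async_call(
        "bang_olufsen",
        "beolink_join",
        {
            "entity_id": "media_player.beosound_balance",  # hypothetical entity
            "beolink_jid": "1111.2222222.33333333@products.bang-olufsen.com",
        },
        blocking=True,
    )

Entity services registered through the platform are exposed under the integration domain, so they surface as the bang_olufsen.beolink_* actions described in services.yaml further down.
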
@@ -157,6 +216,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
        # Beolink compatible sources
        self._beolink_sources: dict[str, bool] = {}
        self._remote_leader: BeolinkLeader | None = None
        # Extra state attributes for showing Beolink: peer(s), listener(s), leader and self
        self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {}

    async def async_added_to_hass(self) -> None:
        """Turn on the dispatchers."""

@@ -166,9 +227,11 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
            CONNECTION_STATUS: self._async_update_connection_state,
            WebsocketNotification.ACTIVE_LISTENING_MODE: self._async_update_sound_modes,
            WebsocketNotification.BEOLINK: self._async_update_beolink,
            WebsocketNotification.CONFIGURATION: self._async_update_name_and_beolink,
            WebsocketNotification.PLAYBACK_ERROR: self._async_update_playback_error,
            WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata_and_beolink,
            WebsocketNotification.PLAYBACK_PROGRESS: self._async_update_playback_progress,
            WebsocketNotification.PLAYBACK_SOURCE: self._async_update_sources,
            WebsocketNotification.PLAYBACK_STATE: self._async_update_playback_state,
            WebsocketNotification.REMOTE_MENU_CHANGED: self._async_update_sources,
            WebsocketNotification.SOURCE_CHANGE: self._async_update_source_change,

@@ -230,6 +293,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):

        await self._async_update_sound_modes()

        # Update beolink attributes and device name.
        await self._async_update_name_and_beolink()

    async def async_update(self) -> None:
        """Update queue settings."""
        # The WebSocket event listener is the main handler for connection state.

@@ -243,7 +309,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
        if queue_settings.shuffle is not None:
            self._attr_shuffle = queue_settings.shuffle

    async def _async_update_sources(self) -> None:
    async def _async_update_sources(self, _: Source | None = None) -> None:
        """Get sources for the specific product."""

        # Audio sources

@@ -270,10 +336,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
        self._audio_sources = {
            source.id: source.name
            for source in cast(list[Source], sources.items)
            if source.is_enabled
            and source.id
            and source.name
            and source.id not in HIDDEN_SOURCE_IDS
            if source.is_enabled and source.id and source.name and source.is_playable
        }

        # Some sources are not Beolink expandable, meaning that they can't be joined by

@@ -375,9 +438,44 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):

        self.async_write_ha_state()

    async def _async_update_name_and_beolink(self) -> None:
        """Update the device friendly name."""
        beolink_self = await self._client.get_beolink_self()

        # Update device name
        device_registry = dr.async_get(self.hass)
        assert self.device_entry is not None

        device_registry.async_update_device(
            device_id=self.device_entry.id,
            name=beolink_self.friendly_name,
        )

        await self._async_update_beolink()

    async def _async_update_beolink(self) -> None:
        """Update the current Beolink leader, listeners, peers and self."""

        self._beolink_attributes = {}

        assert self.device_entry is not None
        assert self.device_entry.name is not None

        # Add Beolink self
        self._beolink_attributes = {
            "beolink": {"self": {self.device_entry.name: self._beolink_jid}}
        }

        # Add Beolink peers
        peers = await self._client.get_beolink_peers()

        if len(peers) > 0:
            self._beolink_attributes["beolink"]["peers"] = {}
            for peer in peers:
                self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = (
                    peer.jid
                )

        # Add Beolink listeners / leader
        self._remote_leader = self._playback_metadata.remote_leader

@@ -397,9 +495,14 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
            # Add self
            group_members.append(self.entity_id)

            self._beolink_attributes["beolink"]["leader"] = {
                self._remote_leader.friendly_name: self._remote_leader.jid,
            }

        # If not listener, check if leader.
        else:
            beolink_listeners = await self._client.get_beolink_listeners()
            beolink_listeners_attribute = {}

            # Check if the device is a leader.
            if len(beolink_listeners) > 0:

@@ -420,6 +523,18 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
                        for beolink_listener in beolink_listeners
                    ]
                )
                # Update Beolink attributes
                for beolink_listener in beolink_listeners:
                    for peer in peers:
                        if peer.jid == beolink_listener.jid:
                            # Get the friendly names for the listeners from the peers
                            beolink_listeners_attribute[peer.friendly_name] = (
                                beolink_listener.jid
                            )
                            break
                self._beolink_attributes["beolink"]["listeners"] = (
                    beolink_listeners_attribute
                )

        self._attr_group_members = group_members

@@ -573,38 +688,19 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
    @property
    def source(self) -> str | None:
        """Return the current audio source."""

        # Try to fix some of the source_change chromecast weirdness.
        if hasattr(self._playback_metadata, "title"):
            # source_change is chromecast but line in is selected.
            if self._playback_metadata.title == BangOlufsenSource.LINE_IN.name:
                return BangOlufsenSource.LINE_IN.name

            # source_change is chromecast but bluetooth is selected.
            if self._playback_metadata.title == BangOlufsenSource.BLUETOOTH.name:
                return BangOlufsenSource.BLUETOOTH.name

        # source_change is line in, bluetooth or optical but stale metadata is sent through the WebSocket,
        # And the source has not changed.
        if self._source_change.id in (
            BangOlufsenSource.BLUETOOTH.id,
            BangOlufsenSource.LINE_IN.id,
            BangOlufsenSource.SPDIF.id,
        ):
            return BangOlufsenSource.CHROMECAST.name

        # source_change is chromecast and there is metadata but no artwork. Bluetooth does support metadata but not artwork
        # So i assume that it is bluetooth and not chromecast
        if (
            hasattr(self._playback_metadata, "art")
            and self._playback_metadata.art is not None
            and len(self._playback_metadata.art) == 0
            and self._source_change.id == BangOlufsenSource.CHROMECAST.id
        ):
            return BangOlufsenSource.BLUETOOTH.name

        return self._source_change.name

    @property
    def extra_state_attributes(self) -> dict[str, Any] | None:
        """Return information that is not returned anywhere else."""
        attributes: dict[str, Any] = {}

        # Add Beolink attributes
        if self._beolink_attributes:
            attributes.update(self._beolink_attributes)

        return attributes

    async def async_turn_off(self) -> None:
        """Set the device to "networkStandby"."""
        await self._client.post_standby()
@@ -876,23 +972,30 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
        # Beolink compatible B&O device.
        # Repeated presses / calls will cycle between compatible playing devices.
        if len(group_members) == 0:
            await self._async_beolink_join()
            await self.async_beolink_join()
            return

        # Get JID for each group member
        jids = [self._get_beolink_jid(group_member) for group_member in group_members]
        await self._async_beolink_expand(jids)
        await self.async_beolink_expand(jids)

    async def async_unjoin_player(self) -> None:
        """Unjoin Beolink session. End session if leader."""
        await self._async_beolink_leave()
        await self.async_beolink_leave()

    async def _async_beolink_join(self) -> None:
    # Custom actions:
    async def async_beolink_join(self, beolink_jid: str | None = None) -> None:
        """Join a Beolink multi-room experience."""
        await self._client.join_latest_beolink_experience()
        if beolink_jid is None:
            await self._client.join_latest_beolink_experience()
        else:
            await self._client.join_beolink_peer(jid=beolink_jid)

    async def _async_beolink_expand(self, beolink_jids: list[str]) -> None:
    async def async_beolink_expand(
        self, beolink_jids: list[str] | None = None, all_discovered: bool = False
    ) -> None:
        """Expand a Beolink multi-room experience with a device or devices."""

        # Ensure that the current source is expandable
        if not self._beolink_sources[cast(str, self._source_change.id)]:
            raise ServiceValidationError(

@@ -904,10 +1007,37 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
                },
            )

        # Try to expand to all defined devices
        for beolink_jid in beolink_jids:
            await self._client.post_beolink_expand(jid=beolink_jid)
        # Expand to all discovered devices
        if all_discovered:
            peers = await self._client.get_beolink_peers()

    async def _async_beolink_leave(self) -> None:
            for peer in peers:
                try:
                    await self._client.post_beolink_expand(jid=peer.jid)
                except NotFoundException:
                    _LOGGER.warning("Unable to expand to %s", peer.jid)

        # Try to expand to all defined devices
        elif beolink_jids:
            for beolink_jid in beolink_jids:
                try:
                    await self._client.post_beolink_expand(jid=beolink_jid)
                except NotFoundException:
                    _LOGGER.warning(
                        "Unable to expand to %s. Is the device available on the network?",
                        beolink_jid,
                    )

    async def async_beolink_unexpand(self, beolink_jids: list[str]) -> None:
        """Unexpand a Beolink multi-room experience with a device or devices."""
        # Unexpand all defined devices
        for beolink_jid in beolink_jids:
            await self._client.post_beolink_unexpand(jid=beolink_jid)

    async def async_beolink_leave(self) -> None:
        """Leave the current Beolink experience."""
        await self._client.post_beolink_leave()

    async def async_beolink_allstandby(self) -> None:
        """Set all connected Beolink devices to standby."""
        await self._client.post_beolink_allstandby()

homeassistant/components/bang_olufsen/services.yaml (Normal file, 79 lines)

@@ -0,0 +1,79 @@
beolink_allstandby:
  target:
    entity:
      integration: bang_olufsen
      domain: media_player
    device:
      integration: bang_olufsen

beolink_expand:
  target:
    entity:
      integration: bang_olufsen
      domain: media_player
    device:
      integration: bang_olufsen
  fields:
    all_discovered:
      required: false
      example: false
      selector:
        boolean:
    jid_options:
      collapsed: false
      fields:
        beolink_jids:
          required: false
          example: >-
            [
              1111.2222222.33333333@products.bang-olufsen.com,
              4444.5555555.66666666@products.bang-olufsen.com
            ]
          selector:
            object:

beolink_join:
  target:
    entity:
      integration: bang_olufsen
      domain: media_player
    device:
      integration: bang_olufsen
  fields:
    jid_options:
      collapsed: false
      fields:
        beolink_jid:
          required: false
          example: 1111.2222222.33333333@products.bang-olufsen.com
          selector:
            text:

beolink_leave:
  target:
    entity:
      integration: bang_olufsen
      domain: media_player
    device:
      integration: bang_olufsen

beolink_unexpand:
  target:
    entity:
      integration: bang_olufsen
      domain: media_player
    device:
      integration: bang_olufsen
  fields:
    jid_options:
      collapsed: false
      fields:
        beolink_jids:
          required: true
          example: >-
            [
              1111.2222222.33333333@products.bang-olufsen.com,
              4444.5555555.66666666@products.bang-olufsen.com
            ]
          selector:
            object:
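
The beolink_jid and beolink_jids fields above are validated by the vol.Match pattern registered in the media player platform earlier in this diff. A quick standalone check of that pattern; the sample value is the example JID from services.yaml:

    import re

    # JID pattern copied from the entity service schemas above.
    JID_PATTERN = r"(^\d{4})[.](\d{7})[.](\d{8})(@products\.bang-olufsen\.com)$"

    sample = "1111.2222222.33333333@products.bang-olufsen.com"
    assert re.match(JID_PATTERN, sample) is not None
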
@@ -1,4 +1,8 @@
{
  "common": {
    "jid_options_name": "JID options",
    "jid_options_description": "Advanced grouping options, where devices' unique Beolink IDs (Called JIDs) are used directly. JIDs can be found in the state attributes of the media player entity."
  },
  "config": {
    "error": {
      "api_exception": "[%key:common::config_flow::error::cannot_connect%]",

@@ -25,6 +29,68 @@
      }
    }
  },
  "services": {
    "beolink_allstandby": {
      "name": "Beolink all standby",
      "description": "Set all Connected Beolink devices to standby."
    },
    "beolink_expand": {
      "name": "Beolink expand",
      "description": "Expand current Beolink experience.",
      "fields": {
        "all_discovered": {
          "name": "All discovered",
          "description": "Expand Beolink experience to all discovered devices."
        },
        "beolink_jids": {
          "name": "Beolink JIDs",
          "description": "Specify which Beolink JIDs will join current Beolink experience."
        }
      },
      "sections": {
        "jid_options": {
          "name": "[%key:component::bang_olufsen::common::jid_options_name%]",
          "description": "[%key:component::bang_olufsen::common::jid_options_description%]"
        }
      }
    },
    "beolink_join": {
      "name": "Beolink join",
      "description": "Join a Beolink experience.",
      "fields": {
        "beolink_jid": {
          "name": "Beolink JID",
          "description": "Manually specify Beolink JID to join."
        }
      },
      "sections": {
        "jid_options": {
          "name": "[%key:component::bang_olufsen::common::jid_options_name%]",
          "description": "[%key:component::bang_olufsen::common::jid_options_description%]"
        }
      }
    },
    "beolink_leave": {
      "name": "Beolink leave",
      "description": "Leave a Beolink experience."
    },
    "beolink_unexpand": {
      "name": "Beolink unexpand",
      "description": "Unexpand from current Beolink experience.",
      "fields": {
        "beolink_jids": {
          "name": "Beolink JIDs",
          "description": "Specify which Beolink JIDs will leave from current Beolink experience."
        }
      },
      "sections": {
        "jid_options": {
          "name": "[%key:component::bang_olufsen::common::jid_options_name%]",
          "description": "[%key:component::bang_olufsen::common::jid_options_description%]"
        }
      }
    }
  },
  "exceptions": {
    "m3u_invalid_format": {
      "message": "Media sources with the .m3u extension are not supported."

@@ -63,6 +63,9 @@ class BangOlufsenWebsocket(BangOlufsenBase):
        self._client.get_playback_progress_notifications(
            self.on_playback_progress_notification
        )
        self._client.get_playback_source_notifications(
            self.on_playback_source_notification
        )
        self._client.get_playback_state_notifications(
            self.on_playback_state_notification
        )

@@ -117,6 +120,11 @@ class BangOlufsenWebsocket(BangOlufsenBase):
                self.hass,
                f"{self._unique_id}_{WebsocketNotification.BEOLINK}",
            )
        elif notification_type is WebsocketNotification.CONFIGURATION:
            async_dispatcher_send(
                self.hass,
                f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
            )
        elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED:
            async_dispatcher_send(
                self.hass,

@@ -157,6 +165,14 @@ class BangOlufsenWebsocket(BangOlufsenBase):
            notification,
        )

    def on_playback_source_notification(self, notification: Source) -> None:
        """Send playback_source dispatch."""
        async_dispatcher_send(
            self.hass,
            f"{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
            notification,
        )

    def on_source_change_notification(self, notification: Source) -> None:
        """Send source_change dispatch."""
        async_dispatcher_send(
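
On the entity side these dispatches are consumed with async_dispatcher_connect; an abbreviated sketch of the receiving end (signal name taken from the diff, the actual integration wires its handlers up via the dispatcher table shown earlier):

    # Inside BangOlufsenMediaPlayer.async_added_to_hass (abbreviated sketch)
    self.async_on_remove(
        async_dispatcher_connect(
            self.hass,
            f"{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
            self._async_update_sources,
        )
    )
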
@@ -10,7 +10,11 @@ from homeassistant.components.sensor import (
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import EntityCategory, UnitOfTemperature
from homeassistant.const import (
    SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
    EntityCategory,
    UnitOfTemperature,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddEntitiesCallback

@@ -32,6 +36,8 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
    SensorEntityDescription(
        key=TYPE_WIFI_STRENGTH,
        translation_key="wifi_strength",
        native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT,
        device_class=SensorDeviceClass.SIGNAL_STRENGTH,
        entity_category=EntityCategory.DIAGNOSTIC,
        state_class=SensorStateClass.MEASUREMENT,
    ),

@@ -364,12 +364,13 @@ class BluesoundPlayer(MediaPlayerEntity):
        if self.is_grouped and not self.is_master:
            return MediaPlayerState.IDLE

        status = self._status.state
        if status in ("pause", "stop"):
            return MediaPlayerState.PAUSED
        if status in ("stream", "play"):
            return MediaPlayerState.PLAYING
        return MediaPlayerState.IDLE
        match self._status.state:
            case "pause":
                return MediaPlayerState.PAUSED
            case "stream" | "play":
                return MediaPlayerState.PLAYING
            case _:
                return MediaPlayerState.IDLE

    @property
    def media_title(self) -> str | None:

@@ -769,7 +770,7 @@ class BluesoundPlayer(MediaPlayerEntity):

    async def async_set_volume_level(self, volume: float) -> None:
        """Send volume_up command to media player."""
        volume = int(volume * 100)
        volume = int(round(volume * 100))
        volume = min(100, volume)
        volume = max(0, volume)

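The added round() guards against float scaling landing just below the target integer; a standalone illustration (values chosen to show the difference, not taken from the integration):

    volume = 0.29
    print(volume * 100)              # 28.999999999999996
    print(int(volume * 100))         # 28, truncated
    print(int(round(volume * 100)))  # 29
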
@@ -21,7 +21,7 @@ from homeassistant.config_entries import (
    ConfigEntry,
    ConfigFlow,
    ConfigFlowResult,
    OptionsFlowWithConfigEntry,
    OptionsFlow,
)
from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_SOURCE, CONF_USERNAME
from homeassistant.core import HomeAssistant, callback

@@ -153,10 +153,10 @@ class BMWConfigFlow(ConfigFlow, domain=DOMAIN):
        config_entry: ConfigEntry,
    ) -> BMWOptionsFlow:
        """Return a MyBMW option flow."""
        return BMWOptionsFlow(config_entry)
        return BMWOptionsFlow()


class BMWOptionsFlow(OptionsFlowWithConfigEntry):
class BMWOptionsFlow(OptionsFlow):
    """Handle a option flow for MyBMW."""

    async def async_step_init(
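
This is the first of many identical migrations in this compare: options flow handlers stop storing the config entry themselves because newer Home Assistant cores attach it to the flow automatically. A minimal sketch of the resulting pattern; the Example names and domain are hypothetical, and the automatic self.config_entry behaviour assumes a core recent enough to provide it:

    from typing import Any

    from homeassistant.config_entries import (
        ConfigEntry,
        ConfigFlow,
        ConfigFlowResult,
        OptionsFlow,
    )
    from homeassistant.core import callback


    class ExampleConfigFlow(ConfigFlow, domain="example"):
        """Hypothetical config flow illustrating the pattern."""

        @staticmethod
        @callback
        def async_get_options_flow(config_entry: ConfigEntry) -> "ExampleOptionsFlow":
            # No constructor argument: the flow manager exposes the entry
            # as self.config_entry on the options flow.
            return ExampleOptionsFlow()


    class ExampleOptionsFlow(OptionsFlow):
        """Hypothetical options flow with no __init__ storing the entry."""

        async def async_step_init(
            self, user_input: dict[str, Any] | None = None
        ) -> ConfigFlowResult:
            # self.config_entry is available here without being passed in.
            return self.async_create_entry(
                title="", data=dict(self.config_entry.options)
            )
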
@@ -16,7 +16,8 @@
    "list_access": {
      "default": "mdi:account-lock",
      "state": {
        "shared": "mdi:account-group"
        "shared": "mdi:account-group",
        "invitation": "mdi:account-multiple-plus"
      }
    }
  },

@@ -79,7 +79,7 @@ SENSOR_DESCRIPTIONS: tuple[BringSensorEntityDescription, ...] = (
        translation_key=BringSensor.LIST_ACCESS,
        value_fn=lambda lst, _: lst["status"].lower(),
        entity_category=EntityCategory.DIAGNOSTIC,
        options=["registered", "shared"],
        options=["registered", "shared", "invitation"],
        device_class=SensorDeviceClass.ENUM,
    ),
)

@@ -66,7 +66,8 @@
      "name": "List access",
      "state": {
        "registered": "Private",
        "shared": "Shared"
        "shared": "Shared",
        "invitation": "Invitation pending"
      }
    }
  }

@@ -7,5 +7,5 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["bsblan"],
  "requirements": ["python-bsblan==0.6.4"]
  "requirements": ["python-bsblan==1.2.1"]
}

@@ -109,6 +109,7 @@ async def async_setup_platform(
|
||||
entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass)
|
||||
coordinator = CalDavUpdateCoordinator(
|
||||
hass,
|
||||
None,
|
||||
calendar=calendar,
|
||||
days=days,
|
||||
include_all_day=True,
|
||||
@@ -126,6 +127,7 @@ async def async_setup_platform(
|
||||
entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device_id, hass=hass)
|
||||
coordinator = CalDavUpdateCoordinator(
|
||||
hass,
|
||||
None,
|
||||
calendar=calendar,
|
||||
days=days,
|
||||
include_all_day=False,
|
||||
@@ -152,6 +154,7 @@ async def async_setup_entry(
|
||||
async_generate_entity_id(ENTITY_ID_FORMAT, calendar.name, hass=hass),
|
||||
CalDavUpdateCoordinator(
|
||||
hass,
|
||||
entry,
|
||||
calendar=calendar,
|
||||
days=CONFIG_ENTRY_DEFAULT_DAYS,
|
||||
include_all_day=True,
|
||||
@@ -204,7 +207,8 @@ class WebDavCalendarEntity(CoordinatorEntity[CalDavUpdateCoordinator], CalendarE
|
||||
if self._supports_offset:
|
||||
self._attr_extra_state_attributes = {
|
||||
"offset_reached": is_offset_reached(
|
||||
self._event.start_datetime_local, self.coordinator.offset
|
||||
self._event.start_datetime_local,
|
||||
self.coordinator.offset, # type: ignore[arg-type]
|
||||
)
|
||||
if self._event
|
||||
else False
|
||||
|
@@ -6,6 +6,9 @@ from datetime import date, datetime, time, timedelta
|
||||
from functools import partial
|
||||
import logging
|
||||
import re
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
import caldav
|
||||
|
||||
from homeassistant.components.calendar import CalendarEvent, extract_offset
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -14,6 +17,9 @@ from homeassistant.util import dt as dt_util
|
||||
|
||||
from .api import get_attr_value
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from . import CalDavConfigEntry
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15)
|
||||
@@ -23,11 +29,20 @@ OFFSET = "!!"
|
||||
class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]):
|
||||
"""Class to utilize the calendar dav client object to get next event."""
|
||||
|
||||
def __init__(self, hass, calendar, days, include_all_day, search):
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
entry: CalDavConfigEntry | None,
|
||||
calendar: caldav.Calendar,
|
||||
days: int,
|
||||
include_all_day: bool,
|
||||
search: str | None,
|
||||
) -> None:
|
||||
"""Set up how we are going to search the WebDav calendar."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=entry,
|
||||
name=f"CalDAV {calendar.name}",
|
||||
update_interval=MIN_TIME_BETWEEN_UPDATES,
|
||||
)
|
||||
@@ -35,7 +50,7 @@ class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]):
|
||||
self.days = days
|
||||
self.include_all_day = include_all_day
|
||||
self.search = search
|
||||
self.offset = None
|
||||
self.offset: timedelta | None = None
|
||||
|
||||
async def async_get_events(
|
||||
self, hass: HomeAssistant, start_date: datetime, end_date: datetime
|
||||
@@ -109,7 +124,7 @@ class CalDavUpdateCoordinator(DataUpdateCoordinator[CalendarEvent | None]):
|
||||
_start_of_tomorrow = start_of_tomorrow
|
||||
if _start_of_today <= start_dt < _start_of_tomorrow:
|
||||
new_event = event.copy()
|
||||
new_vevent = new_event.instance.vevent
|
||||
new_vevent = new_event.instance.vevent # type: ignore[attr-defined]
|
||||
if hasattr(new_vevent, "dtend"):
|
||||
dur = new_vevent.dtend.value - new_vevent.dtstart.value
|
||||
new_vevent.dtend.value = start_dt + dur
|
||||
|
@@ -421,8 +421,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
if hass.config.webrtc.ice_servers:
|
||||
return hass.config.webrtc.ice_servers
|
||||
return [
|
||||
RTCIceServer(urls="stun:stun.home-assistant.io:80"),
|
||||
RTCIceServer(urls="stun:stun.home-assistant.io:3478"),
|
||||
RTCIceServer(
|
||||
urls=[
|
||||
"stun:stun.home-assistant.io:80",
|
||||
"stun:stun.home-assistant.io:3478",
|
||||
]
|
||||
),
|
||||
]
|
||||
|
||||
async_register_ice_servers(hass, get_ice_servers)
|
||||
@@ -472,6 +476,8 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
_attr_state: None = None # State is determined by is_on
|
||||
_attr_supported_features: CameraEntityFeature = CameraEntityFeature(0)
|
||||
|
||||
__supports_stream: CameraEntityFeature | None = None
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize a camera."""
|
||||
self._cache: dict[str, Any] = {}
|
||||
@@ -783,6 +789,9 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
async def async_internal_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
await super().async_internal_added_to_hass()
|
||||
self.__supports_stream = (
|
||||
self.supported_features_compat & CameraEntityFeature.STREAM
|
||||
)
|
||||
await self.async_refresh_providers(write_state=False)
|
||||
|
||||
async def async_refresh_providers(self, *, write_state: bool = True) -> None:
|
||||
@@ -848,7 +857,10 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
]
|
||||
config.configuration.ice_servers.extend(ice_servers)
|
||||
|
||||
config.get_candidates_upfront = self._legacy_webrtc_provider is not None
|
||||
config.get_candidates_upfront = (
|
||||
self._supports_native_sync_webrtc
|
||||
or self._legacy_webrtc_provider is not None
|
||||
)
|
||||
|
||||
return config
|
||||
|
||||
@@ -889,6 +901,21 @@ class Camera(Entity, cached_properties=CACHED_PROPERTIES_WITH_ATTR_):
|
||||
|
||||
return CameraCapabilities(frontend_stream_types)
|
||||
|
||||
@callback
|
||||
def async_write_ha_state(self) -> None:
|
||||
"""Write the state to the state machine.
|
||||
|
||||
Schedules async_refresh_providers if support of streams have changed.
|
||||
"""
|
||||
super().async_write_ha_state()
|
||||
if self.__supports_stream != (
|
||||
supports_stream := self.supported_features_compat
|
||||
& CameraEntityFeature.STREAM
|
||||
):
|
||||
self.__supports_stream = supports_stream
|
||||
self._invalidate_camera_capabilities_cache()
|
||||
self.hass.async_create_task(self.async_refresh_providers())
|
||||
|
||||
|
||||
class CameraView(HomeAssistantView):
|
||||
"""Base CameraView."""
|
||||
|
@@ -52,7 +52,7 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
|
||||
"""Get the options flow for this handler."""
|
||||
return CanaryOptionsFlowHandler(config_entry)
|
||||
return CanaryOptionsFlowHandler()
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Handle a flow initiated by configuration file."""
|
||||
@@ -104,10 +104,6 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
class CanaryOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle Canary client options."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
@@ -41,7 +41,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> CastOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return CastOptionsFlowHandler(config_entry)
|
||||
return CastOptionsFlowHandler()
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -109,9 +109,8 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
class CastOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle Google Cast options."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
def __init__(self) -> None:
|
||||
"""Initialize Google Cast options flow."""
|
||||
self.config_entry = config_entry
|
||||
self.updated_config: dict[str, Any] = {}
|
||||
|
||||
async def async_step_init(self, user_input: None = None) -> ConfigFlowResult:
|
||||
|
@@ -31,6 +31,7 @@ PREF_GOOGLE_REPORT_STATE = "google_report_state"
|
||||
PREF_ALEXA_ENTITY_CONFIGS = "alexa_entity_configs"
|
||||
PREF_ALEXA_REPORT_STATE = "alexa_report_state"
|
||||
PREF_DISABLE_2FA = "disable_2fa"
|
||||
PREF_ENABLE_BACKUP_SYNC = "backup_sync_enabled"
|
||||
PREF_INSTANCE_ID = "instance_id"
|
||||
PREF_SHOULD_EXPOSE = "should_expose"
|
||||
PREF_GOOGLE_LOCAL_WEBHOOK_ID = "google_local_webhook_id"
|
||||
|
@@ -42,6 +42,7 @@ from .const import (
|
||||
PREF_ALEXA_REPORT_STATE,
|
||||
PREF_DISABLE_2FA,
|
||||
PREF_ENABLE_ALEXA,
|
||||
PREF_ENABLE_BACKUP_SYNC,
|
||||
PREF_ENABLE_CLOUD_ICE_SERVERS,
|
||||
PREF_ENABLE_GOOGLE,
|
||||
PREF_GOOGLE_REPORT_STATE,
|
||||
@@ -440,16 +441,17 @@ def validate_language_voice(value: tuple[str, str]) -> tuple[str, str]:
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "cloud/update_prefs",
|
||||
vol.Optional(PREF_ENABLE_GOOGLE): bool,
|
||||
vol.Optional(PREF_ENABLE_ALEXA): bool,
|
||||
vol.Optional(PREF_ALEXA_REPORT_STATE): bool,
|
||||
vol.Optional(PREF_ENABLE_ALEXA): bool,
|
||||
vol.Optional(PREF_ENABLE_BACKUP_SYNC): bool,
|
||||
vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool,
|
||||
vol.Optional(PREF_ENABLE_GOOGLE): bool,
|
||||
vol.Optional(PREF_GOOGLE_REPORT_STATE): bool,
|
||||
vol.Optional(PREF_GOOGLE_SECURE_DEVICES_PIN): vol.Any(None, str),
|
||||
vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool,
|
||||
vol.Optional(PREF_TTS_DEFAULT_VOICE): vol.All(
|
||||
vol.Coerce(tuple), validate_language_voice
|
||||
),
|
||||
vol.Optional(PREF_REMOTE_ALLOW_REMOTE_ENABLE): bool,
|
||||
vol.Optional(PREF_ENABLE_CLOUD_ICE_SERVERS): bool,
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
|
@@ -8,6 +8,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["hass_nabucasa"],
|
||||
"requirements": ["hass-nabucasa==0.83.0"],
|
||||
"requirements": ["hass-nabucasa==0.84.0"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
@@ -32,6 +32,7 @@ from .const import (
|
||||
PREF_CLOUD_USER,
|
||||
PREF_CLOUDHOOKS,
|
||||
PREF_ENABLE_ALEXA,
|
||||
PREF_ENABLE_BACKUP_SYNC,
|
||||
PREF_ENABLE_CLOUD_ICE_SERVERS,
|
||||
PREF_ENABLE_GOOGLE,
|
||||
PREF_ENABLE_REMOTE,
|
||||
@@ -163,21 +164,22 @@ class CloudPreferences:
|
||||
async def async_update(
|
||||
self,
|
||||
*,
|
||||
google_enabled: bool | UndefinedType = UNDEFINED,
|
||||
alexa_enabled: bool | UndefinedType = UNDEFINED,
|
||||
remote_enabled: bool | UndefinedType = UNDEFINED,
|
||||
google_secure_devices_pin: str | None | UndefinedType = UNDEFINED,
|
||||
cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED,
|
||||
cloud_user: str | UndefinedType = UNDEFINED,
|
||||
alexa_report_state: bool | UndefinedType = UNDEFINED,
|
||||
google_report_state: bool | UndefinedType = UNDEFINED,
|
||||
tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED,
|
||||
remote_domain: str | None | UndefinedType = UNDEFINED,
|
||||
alexa_settings_version: int | UndefinedType = UNDEFINED,
|
||||
google_settings_version: int | UndefinedType = UNDEFINED,
|
||||
google_connected: bool | UndefinedType = UNDEFINED,
|
||||
remote_allow_remote_enable: bool | UndefinedType = UNDEFINED,
|
||||
backup_sync_enabled: bool | UndefinedType = UNDEFINED,
|
||||
cloud_ice_servers_enabled: bool | UndefinedType = UNDEFINED,
|
||||
cloud_user: str | UndefinedType = UNDEFINED,
|
||||
cloudhooks: dict[str, dict[str, str | bool]] | UndefinedType = UNDEFINED,
|
||||
google_connected: bool | UndefinedType = UNDEFINED,
|
||||
google_enabled: bool | UndefinedType = UNDEFINED,
|
||||
google_report_state: bool | UndefinedType = UNDEFINED,
|
||||
google_secure_devices_pin: str | None | UndefinedType = UNDEFINED,
|
||||
google_settings_version: int | UndefinedType = UNDEFINED,
|
||||
remote_allow_remote_enable: bool | UndefinedType = UNDEFINED,
|
||||
remote_domain: str | None | UndefinedType = UNDEFINED,
|
||||
remote_enabled: bool | UndefinedType = UNDEFINED,
|
||||
tts_default_voice: tuple[str, str] | UndefinedType = UNDEFINED,
|
||||
) -> None:
|
||||
"""Update user preferences."""
|
||||
prefs = {**self._prefs}
|
||||
@@ -186,21 +188,22 @@ class CloudPreferences:
|
||||
{
|
||||
key: value
|
||||
for key, value in (
|
||||
(PREF_ENABLE_GOOGLE, google_enabled),
|
||||
(PREF_ENABLE_ALEXA, alexa_enabled),
|
||||
(PREF_ENABLE_REMOTE, remote_enabled),
|
||||
(PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin),
|
||||
(PREF_CLOUDHOOKS, cloudhooks),
|
||||
(PREF_CLOUD_USER, cloud_user),
|
||||
(PREF_ALEXA_REPORT_STATE, alexa_report_state),
|
||||
(PREF_GOOGLE_REPORT_STATE, google_report_state),
|
||||
(PREF_ALEXA_SETTINGS_VERSION, alexa_settings_version),
|
||||
(PREF_GOOGLE_SETTINGS_VERSION, google_settings_version),
|
||||
(PREF_TTS_DEFAULT_VOICE, tts_default_voice),
|
||||
(PREF_REMOTE_DOMAIN, remote_domain),
|
||||
(PREF_GOOGLE_CONNECTED, google_connected),
|
||||
(PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable),
|
||||
(PREF_CLOUD_USER, cloud_user),
|
||||
(PREF_CLOUDHOOKS, cloudhooks),
|
||||
(PREF_ENABLE_ALEXA, alexa_enabled),
|
||||
(PREF_ENABLE_BACKUP_SYNC, backup_sync_enabled),
|
||||
(PREF_ENABLE_CLOUD_ICE_SERVERS, cloud_ice_servers_enabled),
|
||||
(PREF_ENABLE_GOOGLE, google_enabled),
|
||||
(PREF_ENABLE_REMOTE, remote_enabled),
|
||||
(PREF_GOOGLE_CONNECTED, google_connected),
|
||||
(PREF_GOOGLE_REPORT_STATE, google_report_state),
|
||||
(PREF_GOOGLE_SECURE_DEVICES_PIN, google_secure_devices_pin),
|
||||
(PREF_GOOGLE_SETTINGS_VERSION, google_settings_version),
|
||||
(PREF_REMOTE_ALLOW_REMOTE_ENABLE, remote_allow_remote_enable),
|
||||
(PREF_REMOTE_DOMAIN, remote_domain),
|
||||
(PREF_TTS_DEFAULT_VOICE, tts_default_voice),
|
||||
)
|
||||
if value is not UNDEFINED
|
||||
}
|
||||
@@ -242,6 +245,8 @@ class CloudPreferences:
|
||||
PREF_ALEXA_REPORT_STATE: self.alexa_report_state,
|
||||
PREF_CLOUDHOOKS: self.cloudhooks,
|
||||
PREF_ENABLE_ALEXA: self.alexa_enabled,
|
||||
PREF_ENABLE_BACKUP_SYNC: self.backup_sync_enabled,
|
||||
PREF_ENABLE_CLOUD_ICE_SERVERS: self.cloud_ice_servers_enabled,
|
||||
PREF_ENABLE_GOOGLE: self.google_enabled,
|
||||
PREF_ENABLE_REMOTE: self.remote_enabled,
|
||||
PREF_GOOGLE_DEFAULT_EXPOSE: self.google_default_expose,
|
||||
@@ -249,7 +254,6 @@ class CloudPreferences:
|
||||
PREF_GOOGLE_SECURE_DEVICES_PIN: self.google_secure_devices_pin,
|
||||
PREF_REMOTE_ALLOW_REMOTE_ENABLE: self.remote_allow_remote_enable,
|
||||
PREF_TTS_DEFAULT_VOICE: self.tts_default_voice,
|
||||
PREF_ENABLE_CLOUD_ICE_SERVERS: self.cloud_ice_servers_enabled,
|
||||
}
|
||||
|
||||
@property
|
||||
@@ -374,6 +378,12 @@ class CloudPreferences:
|
||||
)
|
||||
return cloud_ice_servers_enabled
|
||||
|
||||
@property
|
||||
def backup_sync_enabled(self) -> bool:
|
||||
"""Return if backup sync is enabled."""
|
||||
backup_sync_enabled: bool = self._prefs.get(PREF_ENABLE_BACKUP_SYNC, False)
|
||||
return backup_sync_enabled
|
||||
|
||||
async def get_cloud_user(self) -> str:
|
||||
"""Return ID of Home Assistant Cloud system user."""
|
||||
user = await self._load_cloud_user()
|
||||
@@ -419,6 +429,7 @@ class CloudPreferences:
|
||||
PREF_CLOUD_USER: None,
|
||||
PREF_CLOUDHOOKS: {},
|
||||
PREF_ENABLE_ALEXA: True,
|
||||
PREF_ENABLE_BACKUP_SYNC: True,
|
||||
PREF_ENABLE_GOOGLE: True,
|
||||
PREF_ENABLE_REMOTE: False,
|
||||
PREF_ENABLE_CLOUD_ICE_SERVERS: True,
|
||||
|
@@ -158,16 +158,12 @@ class CoinbaseConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return OptionsFlowHandler(config_entry)
|
||||
return OptionsFlowHandler()
|
||||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlow):
|
||||
"""Handle a option flow for Coinbase."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
@@ -4,5 +4,5 @@
|
||||
"codeowners": ["@Petro31"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/compensation",
|
||||
"iot_class": "calculated",
|
||||
"requirements": ["numpy==1.26.4"]
|
||||
"requirements": ["numpy==2.1.3"]
|
||||
}
|
||||
|
@@ -154,16 +154,12 @@ class Control4ConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return OptionsFlowHandler(config_entry)
|
||||
return OptionsFlowHandler()
|
||||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlow):
|
||||
"""Handle a option flow for Control4."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
@@ -294,7 +294,7 @@ class DefaultAgent(ConversationEntity):
|
||||
self.hass, language, DOMAIN, [DOMAIN]
|
||||
)
|
||||
response_text = translations.get(
|
||||
f"component.{DOMAIN}.agent.done", "Done"
|
||||
f"component.{DOMAIN}.conversation.agent.done", "Done"
|
||||
)
|
||||
|
||||
response.async_set_speech(response_text)
|
||||
|
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.10.30"]
|
||||
"requirements": ["hassil==1.7.4", "home-assistant-intents==2024.11.6"]
|
||||
}
|
||||
|
@@ -213,18 +213,19 @@ class CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow):
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize Crownstone options."""
|
||||
super().__init__(OPTIONS_FLOW, self.async_create_new_entry)
|
||||
self.entry = config_entry
|
||||
self.updated_options = config_entry.options.copy()
|
||||
self.options = config_entry.options.copy()
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage Crownstone options."""
|
||||
self.cloud: CrownstoneCloud = self.hass.data[DOMAIN][self.entry.entry_id].cloud
|
||||
self.cloud: CrownstoneCloud = self.hass.data[DOMAIN][
|
||||
self.config_entry.entry_id
|
||||
].cloud
|
||||
|
||||
spheres = {sphere.name: sphere.cloud_id for sphere in self.cloud.cloud_data}
|
||||
usb_path = self.entry.options.get(CONF_USB_PATH)
|
||||
usb_sphere = self.entry.options.get(CONF_USB_SPHERE)
|
||||
usb_path = self.config_entry.options.get(CONF_USB_PATH)
|
||||
usb_sphere = self.config_entry.options.get(CONF_USB_SPHERE)
|
||||
|
||||
options_schema = vol.Schema(
|
||||
{vol.Optional(CONF_USE_USB_OPTION, default=usb_path is not None): bool}
|
||||
@@ -243,14 +244,14 @@ class CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow):
|
||||
if user_input[CONF_USE_USB_OPTION] and usb_path is None:
|
||||
return await self.async_step_usb_config()
|
||||
if not user_input[CONF_USE_USB_OPTION] and usb_path is not None:
|
||||
self.updated_options[CONF_USB_PATH] = None
|
||||
self.updated_options[CONF_USB_SPHERE] = None
|
||||
self.options[CONF_USB_PATH] = None
|
||||
self.options[CONF_USB_SPHERE] = None
|
||||
elif (
|
||||
CONF_USB_SPHERE_OPTION in user_input
|
||||
and spheres[user_input[CONF_USB_SPHERE_OPTION]] != usb_sphere
|
||||
):
|
||||
sphere_id = spheres[user_input[CONF_USB_SPHERE_OPTION]]
|
||||
self.updated_options[CONF_USB_SPHERE] = sphere_id
|
||||
self.options[CONF_USB_SPHERE] = sphere_id
|
||||
|
||||
return self.async_create_new_entry()
|
||||
|
||||
@@ -260,7 +261,7 @@ class CrownstoneOptionsFlowHandler(BaseCrownstoneFlowHandler, OptionsFlow):
|
||||
"""Create a new entry."""
|
||||
# these attributes will only change when a usb was configured
|
||||
if self.usb_path is not None and self.usb_sphere_id is not None:
|
||||
self.updated_options[CONF_USB_PATH] = self.usb_path
|
||||
self.updated_options[CONF_USB_SPHERE] = self.usb_sphere_id
|
||||
self.options[CONF_USB_PATH] = self.usb_path
|
||||
self.options[CONF_USB_SPHERE] = self.usb_sphere_id
|
||||
|
||||
return super().async_create_entry(title="", data=self.updated_options)
|
||||
return super().async_create_entry(title="", data=self.options)
|
||||
|
@@ -74,9 +74,11 @@ class DeconzFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> DeconzOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return DeconzOptionsFlowHandler(config_entry)
|
||||
return DeconzOptionsFlowHandler()
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the deCONZ config flow."""
|
||||
@@ -299,11 +301,6 @@ class DeconzOptionsFlowHandler(OptionsFlow):
|
||||
|
||||
gateway: DeconzHub
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize deCONZ options flow."""
|
||||
self.config_entry = config_entry
|
||||
self.options = dict(config_entry.options)
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -315,8 +312,7 @@ class DeconzOptionsFlowHandler(OptionsFlow):
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the deconz devices options."""
|
||||
if user_input is not None:
|
||||
self.options.update(user_input)
|
||||
return self.async_create_entry(title="", data=self.options)
|
||||
return self.async_create_entry(data=self.config_entry.options | user_input)
|
||||
|
||||
schema_options = {}
|
||||
for option, default in (
|
||||
|
@@ -47,7 +47,6 @@ class OptionsFlowHandler(OptionsFlow):
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.config_entry = config_entry
|
||||
self.options = dict(config_entry.options)
|
||||
|
||||
async def async_step_init(
|
||||
|
@@ -52,10 +52,6 @@ CONFIG_SCHEMA = vol.Schema({vol.Optional(CONF_HOST): str})
|
||||
class OptionsFlowHandler(OptionsFlow):
|
||||
"""Options for the component."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Init object."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -119,7 +115,7 @@ class DenonAvrFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlowHandler:
|
||||
"""Get the options flow."""
|
||||
return OptionsFlowHandler(config_entry)
|
||||
return OptionsFlowHandler()
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
|
@@ -69,16 +69,12 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> DexcomOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return DexcomOptionsFlowHandler(config_entry)
|
||||
return DexcomOptionsFlowHandler()
|
||||
|
||||
|
||||
class DexcomOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle a option flow for Dexcom."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
@@ -74,7 +74,7 @@ class DlnaDmrFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlow:
|
||||
"""Define the config flow to handle options."""
|
||||
return DlnaDmrOptionsFlowHandler(config_entry)
|
||||
return DlnaDmrOptionsFlowHandler()
|
||||
|
||||
async def async_step_user(self, user_input: FlowInput = None) -> ConfigFlowResult:
|
||||
"""Handle a flow initialized by the user.
|
||||
@@ -327,10 +327,6 @@ class DlnaDmrOptionsFlowHandler(OptionsFlow):
|
||||
Configures the single instance and updates the existing config entry.
|
||||
"""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
@@ -14,7 +14,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithConfigEntry,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_NAME, CONF_PORT
|
||||
from homeassistant.core import callback
|
||||
@@ -101,7 +101,7 @@ class DnsIPConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> DnsIPOptionsFlowHandler:
|
||||
"""Return Option handler."""
|
||||
return DnsIPOptionsFlowHandler(config_entry)
|
||||
return DnsIPOptionsFlowHandler()
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -165,7 +165,7 @@ class DnsIPConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
|
||||
class DnsIPOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class DnsIPOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle a option config flow for dnsip integration."""
|
||||
|
||||
async def async_step_init(
|
||||
|
@@ -213,16 +213,12 @@ class DoorBirdConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return OptionsFlowHandler(config_entry)
|
||||
return OptionsFlowHandler()
|
||||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlow):
|
||||
"""Handle a option flow for doorbird."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
@@ -171,9 +171,11 @@ class DSMRFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> DSMROptionFlowHandler:
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> DSMROptionFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return DSMROptionFlowHandler(config_entry)
|
||||
return DSMROptionFlowHandler()
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -311,10 +313,6 @@ class DSMRFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
class DSMROptionFlowHandler(OptionsFlow):
|
||||
"""Handle options."""
|
||||
|
||||
def __init__(self, entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.entry = entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -328,7 +326,7 @@ class DSMROptionFlowHandler(OptionsFlow):
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_TIME_BETWEEN_UPDATE,
|
||||
default=self.entry.options.get(
|
||||
default=self.config_entry.options.get(
|
||||
CONF_TIME_BETWEEN_UPDATE, DEFAULT_TIME_BETWEEN_UPDATE
|
||||
),
|
||||
): vol.All(vol.Coerce(int), vol.Range(min=0)),
|
||||
|
@@ -6,9 +6,14 @@ from collections.abc import Awaitable, Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from homeassistant.components.number import NumberEntity, NumberEntityDescription
|
||||
from homeassistant.components.number import (
|
||||
NumberDeviceClass,
|
||||
NumberEntity,
|
||||
NumberEntityDescription,
|
||||
NumberMode,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import UnitOfTime
|
||||
from homeassistant.const import UnitOfTemperature, UnitOfTime
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
|
||||
@@ -54,21 +59,30 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up the ecobee thermostat number entity."""
|
||||
data: EcobeeData = hass.data[DOMAIN]
|
||||
_LOGGER.debug("Adding min time ventilators numbers (if present)")
|
||||
|
||||
async_add_entities(
|
||||
assert data is not None
|
||||
|
||||
entities: list[NumberEntity] = [
|
||||
EcobeeVentilatorMinTime(data, index, numbers)
|
||||
for index, thermostat in enumerate(data.ecobee.thermostats)
|
||||
if thermostat["settings"]["ventilatorType"] != "none"
|
||||
for numbers in VENTILATOR_NUMBERS
|
||||
]
|
||||
|
||||
_LOGGER.debug("Adding compressor min temp number (if present)")
|
||||
entities.extend(
|
||||
(
|
||||
EcobeeVentilatorMinTime(data, index, numbers)
|
||||
EcobeeCompressorMinTemp(data, index)
|
||||
for index, thermostat in enumerate(data.ecobee.thermostats)
|
||||
if thermostat["settings"]["ventilatorType"] != "none"
|
||||
for numbers in VENTILATOR_NUMBERS
|
||||
),
|
||||
True,
|
||||
if thermostat["settings"]["hasHeatPump"]
|
||||
)
|
||||
)
|
||||
|
||||
async_add_entities(entities, True)
|
||||
|
||||
|
||||
class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity):
|
||||
"""A number class, representing min time for an ecobee thermostat with ventilator attached."""
|
||||
"""A number class, representing min time for an ecobee thermostat with ventilator attached."""
|
||||
|
||||
entity_description: EcobeeNumberEntityDescription
|
||||
|
||||
@@ -105,3 +119,53 @@ class EcobeeVentilatorMinTime(EcobeeBaseEntity, NumberEntity):
|
||||
"""Set new ventilator Min On Time value."""
|
||||
self.entity_description.set_fn(self.data, self.thermostat_index, int(value))
|
||||
self.update_without_throttle = True
|
||||
|
||||
|
||||
class EcobeeCompressorMinTemp(EcobeeBaseEntity, NumberEntity):
|
||||
"""Minimum outdoor temperature at which the compressor will operate.
|
||||
|
||||
This applies more to air source heat pumps than geothermal. This serves as a safety
|
||||
feature (compressors have a minimum operating temperature) as well as
|
||||
providing the ability to choose fuel in a dual-fuel system (i.e. choose between
|
||||
electrical heat pump and fossil auxiliary heat depending on Time of Use, Solar,
|
||||
etc.).
|
||||
Note that python-ecobee-api refers to this as Aux Cutover Threshold, but Ecobee
|
||||
uses Compressor Protection Min Temp.
|
||||
"""
|
||||
|
||||
_attr_device_class = NumberDeviceClass.TEMPERATURE
|
||||
_attr_has_entity_name = True
|
||||
_attr_icon = "mdi:thermometer-off"
|
||||
_attr_mode = NumberMode.BOX
|
||||
_attr_native_min_value = -25
|
||||
_attr_native_max_value = 66
|
||||
_attr_native_step = 5
|
||||
_attr_native_unit_of_measurement = UnitOfTemperature.FAHRENHEIT
|
||||
_attr_translation_key = "compressor_protection_min_temp"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
data: EcobeeData,
|
||||
thermostat_index: int,
|
||||
) -> None:
|
||||
"""Initialize ecobee compressor min temperature."""
|
||||
super().__init__(data, thermostat_index)
|
||||
self._attr_unique_id = f"{self.base_unique_id}_compressor_protection_min_temp"
|
||||
self.update_without_throttle = False
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Get the latest state from the thermostat."""
|
||||
if self.update_without_throttle:
|
||||
await self.data.update(no_throttle=True)
|
||||
self.update_without_throttle = False
|
||||
else:
|
||||
await self.data.update()
|
||||
|
||||
self._attr_native_value = (
|
||||
(self.thermostat["settings"]["compressorProtectionMinTemp"]) / 10
|
||||
)
|
||||
|
||||
def set_native_value(self, value: float) -> None:
|
||||
"""Set new compressor minimum temperature."""
|
||||
self.data.ecobee.set_aux_cutover_threshold(self.thermostat_index, value)
|
||||
self.update_without_throttle = True
|
||||
|
@@ -33,15 +33,18 @@
|
||||
},
|
||||
"number": {
|
||||
"ventilator_min_type_home": {
|
||||
"name": "Ventilator min time home"
|
||||
"name": "Ventilator minimum time home"
|
||||
},
|
||||
"ventilator_min_type_away": {
|
||||
"name": "Ventilator min time away"
|
||||
"name": "Ventilator minimum time away"
|
||||
},
|
||||
"compressor_protection_min_temp": {
|
||||
"name": "Compressor minimum temperature"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"aux_heat_only": {
|
||||
"name": "Aux heat only"
|
||||
"name": "Auxiliary heat only"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
@@ -31,25 +31,26 @@ async def async_setup_entry(
"""Set up the ecobee thermostat switch entity."""
data: EcobeeData = hass.data[DOMAIN]

async_add_entities(
[
EcobeeVentilator20MinSwitch(
data,
index,
(await dt_util.async_get_time_zone(thermostat["location"]["timeZone"]))
or dt_util.get_default_time_zone(),
)
entities: list[SwitchEntity] = [
EcobeeVentilator20MinSwitch(
data,
index,
(await dt_util.async_get_time_zone(thermostat["location"]["timeZone"]))
or dt_util.get_default_time_zone(),
)
for index, thermostat in enumerate(data.ecobee.thermostats)
if thermostat["settings"]["ventilatorType"] != "none"
]

entities.extend(
(
EcobeeSwitchAuxHeatOnly(data, index)
for index, thermostat in enumerate(data.ecobee.thermostats)
if thermostat["settings"]["ventilatorType"] != "none"
],
update_before_add=True,
if thermostat["settings"]["hasHeatPump"]
)
)

async_add_entities(
EcobeeSwitchAuxHeatOnly(data, index)
for index, thermostat in enumerate(data.ecobee.thermostats)
if thermostat["settings"]["hasHeatPump"]
)
async_add_entities(entities, update_before_add=True)


class EcobeeVentilator20MinSwitch(EcobeeBaseEntity, SwitchEntity):
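Because removed and added lines are interleaved in the hunk above, the end state is easier to read in one piece. A reconstruction of the new setup body as suggested by the added lines (only the function name appears in the hunk header; the parameters shown are the usual entity-platform shape and are assumed here):

    async def async_setup_entry(
        hass: HomeAssistant,
        config_entry: ConfigEntry,
        async_add_entities: AddEntitiesCallback,
    ) -> None:
        """Set up the ecobee thermostat switch entity."""
        data: EcobeeData = hass.data[DOMAIN]

        # Ventilator switches only for thermostats that actually have a ventilator.
        entities: list[SwitchEntity] = [
            EcobeeVentilator20MinSwitch(
                data,
                index,
                (await dt_util.async_get_time_zone(thermostat["location"]["timeZone"]))
                or dt_util.get_default_time_zone(),
            )
            for index, thermostat in enumerate(data.ecobee.thermostats)
            if thermostat["settings"]["ventilatorType"] != "none"
        ]

        # Aux-heat-only switches only for thermostats with a heat pump.
        entities.extend(
            (
                EcobeeSwitchAuxHeatOnly(data, index)
                for index, thermostat in enumerate(data.ecobee.thermostats)
                if thermostat["settings"]["hasHeatPump"]
            )
        )

        async_add_entities(entities, update_before_add=True)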
@@ -14,7 +14,6 @@ from homeassistant.config_entries import (
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
OptionsFlowWithConfigEntry,
)
from homeassistant.const import CONF_API_KEY
from homeassistant.core import HomeAssistant
@@ -103,13 +102,12 @@ class ElevenLabsConfigFlow(ConfigFlow, domain=DOMAIN):
return ElevenLabsOptionsFlow(config_entry)


class ElevenLabsOptionsFlow(OptionsFlowWithConfigEntry):
class ElevenLabsOptionsFlow(OptionsFlow):
"""ElevenLabs options flow."""

def __init__(self, config_entry: ConfigEntry) -> None:
"""Initialize options flow."""
super().__init__(config_entry)
self.api_key: str = self.config_entry.data[CONF_API_KEY]
self.api_key: str = config_entry.data[CONF_API_KEY]
# id -> name
self.voices: dict[str, str] = {}
self.models: dict[str, str] = {}
@@ -170,7 +168,7 @@ class ElevenLabsOptionsFlow(OptionsFlowWithConfigEntry):
vol.Required(CONF_CONFIGURE_VOICE, default=False): bool,
}
),
self.options,
self.config_entry.options,
)

async def async_step_voice_settings(
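This is the same refactor applied to most of the config flows in this diff: the handler derives from OptionsFlow instead of the deprecated OptionsFlowWithConfigEntry, async_get_options_flow no longer passes the entry in, and the handler reads self.config_entry, which the options-flow machinery makes available without the handler storing it. A minimal sketch of the resulting shape, with an illustrative domain and option key that are not taken from the diff:

    from homeassistant.config_entries import (
        ConfigEntry,
        ConfigFlow,
        ConfigFlowResult,
        OptionsFlow,
    )
    from homeassistant.core import callback


    class ExampleConfigFlow(ConfigFlow, domain="example"):
        """Config flow for an illustrative integration."""

        @staticmethod
        @callback
        def async_get_options_flow(config_entry: ConfigEntry) -> "ExampleOptionsFlow":
            # No argument needed; the framework attaches config_entry to the handler.
            return ExampleOptionsFlow()


    class ExampleOptionsFlow(OptionsFlow):
        """Options flow that relies on self.config_entry instead of an __init__."""

        async def async_step_init(self, user_input: dict | None = None) -> ConfigFlowResult:
            if user_input is not None:
                return self.async_create_entry(title="", data=user_input)
            # Seed the form from the stored options without keeping a local copy.
            current = self.config_entry.options.get("refresh_interval", 60)
            return self.async_show_form(step_id="init")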
@@ -5,8 +5,11 @@ from pyemoncms import EmoncmsClient
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_API_KEY, CONF_URL, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue

from .const import DOMAIN, EMONCMS_UUID_DOC_URL, LOGGER
from .coordinator import EmoncmsCoordinator

PLATFORMS: list[Platform] = [Platform.SENSOR]
@@ -14,6 +17,49 @@ PLATFORMS: list[Platform] = [Platform.SENSOR]
type EmonCMSConfigEntry = ConfigEntry[EmoncmsCoordinator]


def _migrate_unique_id(
hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_unique_id: str
) -> None:
"""Migrate to emoncms unique id if needed."""
ent_reg = er.async_get(hass)
entry_entities = ent_reg.entities.get_entries_for_config_entry_id(entry.entry_id)
for entity in entry_entities:
if entity.unique_id.split("-")[0] == entry.entry_id:
feed_id = entity.unique_id.split("-")[-1]
LOGGER.debug(f"moving feed {feed_id} to hardware uuid")
ent_reg.async_update_entity(
entity.entity_id, new_unique_id=f"{emoncms_unique_id}-{feed_id}"
)
hass.config_entries.async_update_entry(
entry,
unique_id=emoncms_unique_id,
)


async def _check_unique_id_migration(
hass: HomeAssistant, entry: EmonCMSConfigEntry, emoncms_client: EmoncmsClient
) -> None:
"""Check if we can migrate to the emoncms uuid."""
emoncms_unique_id = await emoncms_client.async_get_uuid()
if emoncms_unique_id:
if entry.unique_id != emoncms_unique_id:
_migrate_unique_id(hass, entry, emoncms_unique_id)
else:
async_create_issue(
hass,
DOMAIN,
"migrate database",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="migrate_database",
translation_placeholders={
"url": entry.data[CONF_URL],
"doc_url": EMONCMS_UUID_DOC_URL,
},
)


async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> bool:
"""Load a config entry."""
emoncms_client = EmoncmsClient(
@@ -21,6 +67,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: EmonCMSConfigEntry) -> b
entry.data[CONF_API_KEY],
session=async_get_clientsession(hass),
)
await _check_unique_id_migration(hass, entry, emoncms_client)
coordinator = EmoncmsCoordinator(hass, emoncms_client)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
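The migration above only rewrites entities whose unique ID still carries the config entry ID as its prefix, swapping that prefix for the UUID that emoncms 11.5.7 and later expose while keeping the feed ID suffix; servers without a UUID get a repair issue instead. Roughly, with illustrative values:

    entry_id = "0123456789abcdef0123456789abcdef"  # illustrative config entry ID
    emoncms_uuid = "c81e7d0e-8f5a-4f6e-9d2b-1a2b3c4d5e6f"  # illustrative emoncms UUID

    old_unique_id = f"{entry_id}-42"  # "42" is the emoncms feed ID
    if old_unique_id.split("-")[0] == entry_id:
        feed_id = old_unique_id.split("-")[-1]
        new_unique_id = f"{emoncms_uuid}-{feed_id}"
        # the registry entry is then updated, e.g.
        # ent_reg.async_update_entity(entity_id, new_unique_id=new_unique_id)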
@@ -1,5 +1,7 @@
|
||||
"""Configflow for the emoncms integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from pyemoncms import EmoncmsClient
|
||||
@@ -9,10 +11,10 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithConfigEntry,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_API_KEY, CONF_URL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import selector
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
@@ -46,13 +48,10 @@ def sensor_name(url: str) -> str:
|
||||
return f"emoncms@{sensorip}"
|
||||
|
||||
|
||||
async def get_feed_list(hass: HomeAssistant, url: str, api_key: str) -> dict[str, Any]:
|
||||
async def get_feed_list(
|
||||
emoncms_client: EmoncmsClient,
|
||||
) -> dict[str, Any]:
|
||||
"""Check connection to emoncms and return feed list if successful."""
|
||||
emoncms_client = EmoncmsClient(
|
||||
url,
|
||||
api_key,
|
||||
session=async_get_clientsession(hass),
|
||||
)
|
||||
return await emoncms_client.async_request("/feed/list.json")
|
||||
|
||||
|
||||
@@ -68,7 +67,7 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@callback
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlowWithConfigEntry:
|
||||
) -> EmoncmsOptionsFlow:
|
||||
"""Get the options flow for this handler."""
|
||||
return EmoncmsOptionsFlow(config_entry)
|
||||
|
||||
@@ -77,23 +76,28 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
) -> ConfigFlowResult:
|
||||
"""Initiate a flow via the UI."""
|
||||
errors: dict[str, str] = {}
|
||||
description_placeholders = {}
|
||||
|
||||
if user_input is not None:
|
||||
self.url = user_input[CONF_URL]
|
||||
self.api_key = user_input[CONF_API_KEY]
|
||||
self._async_abort_entries_match(
|
||||
{
|
||||
CONF_API_KEY: user_input[CONF_API_KEY],
|
||||
CONF_URL: user_input[CONF_URL],
|
||||
CONF_API_KEY: self.api_key,
|
||||
CONF_URL: self.url,
|
||||
}
|
||||
)
|
||||
result = await get_feed_list(
|
||||
self.hass, user_input[CONF_URL], user_input[CONF_API_KEY]
|
||||
emoncms_client = EmoncmsClient(
|
||||
self.url, self.api_key, session=async_get_clientsession(self.hass)
|
||||
)
|
||||
result = await get_feed_list(emoncms_client)
|
||||
if not result[CONF_SUCCESS]:
|
||||
errors["base"] = result[CONF_MESSAGE]
|
||||
errors["base"] = "api_error"
|
||||
description_placeholders = {"details": result[CONF_MESSAGE]}
|
||||
else:
|
||||
self.include_only_feeds = user_input.get(CONF_ONLY_INCLUDE_FEEDID)
|
||||
self.url = user_input[CONF_URL]
|
||||
self.api_key = user_input[CONF_API_KEY]
|
||||
await self.async_set_unique_id(await emoncms_client.async_get_uuid())
|
||||
self._abort_if_unique_id_configured()
|
||||
options = get_options(result[CONF_MESSAGE])
|
||||
self.dropdown = {
|
||||
"options": options,
|
||||
@@ -113,6 +117,7 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
user_input,
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders=description_placeholders,
|
||||
)
|
||||
|
||||
async def async_step_choose_feeds(
|
||||
@@ -167,32 +172,41 @@ class EmoncmsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return result
|
||||
|
||||
|
||||
class EmoncmsOptionsFlow(OptionsFlowWithConfigEntry):
|
||||
class EmoncmsOptionsFlow(OptionsFlow):
|
||||
"""Emoncms Options flow handler."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize emoncms options flow."""
|
||||
self._url = config_entry.data[CONF_URL]
|
||||
self._api_key = config_entry.data[CONF_API_KEY]
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Manage the options."""
|
||||
errors: dict[str, str] = {}
|
||||
data = self.options if self.options else self._config_entry.data
|
||||
url = data[CONF_URL]
|
||||
api_key = data[CONF_API_KEY]
|
||||
include_only_feeds = data.get(CONF_ONLY_INCLUDE_FEEDID, [])
|
||||
description_placeholders = {}
|
||||
include_only_feeds = self.config_entry.options.get(
|
||||
CONF_ONLY_INCLUDE_FEEDID,
|
||||
self.config_entry.data.get(CONF_ONLY_INCLUDE_FEEDID, []),
|
||||
)
|
||||
options: list = include_only_feeds
|
||||
result = await get_feed_list(self.hass, url, api_key)
|
||||
emoncms_client = EmoncmsClient(
|
||||
self._url,
|
||||
self._api_key,
|
||||
session=async_get_clientsession(self.hass),
|
||||
)
|
||||
result = await get_feed_list(emoncms_client)
|
||||
if not result[CONF_SUCCESS]:
|
||||
errors["base"] = result[CONF_MESSAGE]
|
||||
errors["base"] = "api_error"
|
||||
description_placeholders = {"details": result[CONF_MESSAGE]}
|
||||
else:
|
||||
options = get_options(result[CONF_MESSAGE])
|
||||
dropdown = {"options": options, "mode": "dropdown", "multiple": True}
|
||||
if user_input:
|
||||
include_only_feeds = user_input[CONF_ONLY_INCLUDE_FEEDID]
|
||||
return self.async_create_entry(
|
||||
title=sensor_name(url),
|
||||
data={
|
||||
CONF_URL: url,
|
||||
CONF_API_KEY: api_key,
|
||||
CONF_ONLY_INCLUDE_FEEDID: include_only_feeds,
|
||||
},
|
||||
)
|
||||
@@ -207,4 +221,5 @@ class EmoncmsOptionsFlow(OptionsFlowWithConfigEntry):
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders=description_placeholders,
|
||||
)
|
||||
|
@@ -7,6 +7,10 @@ CONF_ONLY_INCLUDE_FEEDID = "include_only_feed_id"
|
||||
CONF_MESSAGE = "message"
|
||||
CONF_SUCCESS = "success"
|
||||
DOMAIN = "emoncms"
|
||||
EMONCMS_UUID_DOC_URL = (
|
||||
"https://docs.openenergymonitor.org/emoncms/update.html"
|
||||
"#upgrading-to-a-version-producing-a-unique-identifier"
|
||||
)
|
||||
FEED_ID = "id"
|
||||
FEED_NAME = "name"
|
||||
FEED_TAG = "tag"
|
||||
|
@@ -138,29 +138,30 @@ async def async_setup_entry(
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the emoncms sensors."""
|
||||
config = entry.options if entry.options else entry.data
|
||||
name = sensor_name(config[CONF_URL])
|
||||
exclude_feeds = config.get(CONF_EXCLUDE_FEEDID)
|
||||
include_only_feeds = config.get(CONF_ONLY_INCLUDE_FEEDID)
|
||||
name = sensor_name(entry.data[CONF_URL])
|
||||
exclude_feeds = entry.data.get(CONF_EXCLUDE_FEEDID)
|
||||
include_only_feeds = entry.options.get(
|
||||
CONF_ONLY_INCLUDE_FEEDID, entry.data.get(CONF_ONLY_INCLUDE_FEEDID)
|
||||
)
|
||||
|
||||
if exclude_feeds is None and include_only_feeds is None:
|
||||
return
|
||||
|
||||
coordinator = entry.runtime_data
|
||||
# uuid was added in emoncms database 11.5.7
|
||||
unique_id = entry.unique_id if entry.unique_id else entry.entry_id
|
||||
elems = coordinator.data
|
||||
if not elems:
|
||||
return
|
||||
|
||||
sensors: list[EmonCmsSensor] = []
|
||||
|
||||
for idx, elem in enumerate(elems):
|
||||
if include_only_feeds is not None and elem[FEED_ID] not in include_only_feeds:
|
||||
continue
|
||||
|
||||
sensors.append(
|
||||
EmonCmsSensor(
|
||||
coordinator,
|
||||
entry.entry_id,
|
||||
unique_id,
|
||||
elem["unit"],
|
||||
name,
|
||||
idx,
|
||||
@@ -175,7 +176,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity):
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: EmoncmsCoordinator,
|
||||
entry_id: str,
|
||||
unique_id: str,
|
||||
unit_of_measurement: str | None,
|
||||
name: str,
|
||||
idx: int,
|
||||
@@ -188,7 +189,7 @@ class EmonCmsSensor(CoordinatorEntity[EmoncmsCoordinator], SensorEntity):
|
||||
elem = self.coordinator.data[self.idx]
|
||||
self._attr_name = f"{name} {elem[FEED_NAME]}"
|
||||
self._attr_native_unit_of_measurement = unit_of_measurement
|
||||
self._attr_unique_id = f"{entry_id}-{elem[FEED_ID]}"
|
||||
self._attr_unique_id = f"{unique_id}-{elem[FEED_ID]}"
|
||||
if unit_of_measurement in ("kWh", "Wh"):
|
||||
self._attr_device_class = SensorDeviceClass.ENERGY
|
||||
self._attr_state_class = SensorStateClass.TOTAL_INCREASING
|
||||
|
@@ -1,5 +1,8 @@
|
||||
{
|
||||
"config": {
|
||||
"error": {
|
||||
"api_error": "An error occured in the pyemoncms API : {details}"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
@@ -16,9 +19,15 @@
|
||||
"include_only_feed_id": "Choose feeds to include"
|
||||
}
|
||||
}
|
||||
},
|
||||
"abort": {
|
||||
"already_configured": "This server is already configured"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"error": {
|
||||
"api_error": "[%key:component::emoncms::config::error::api_error%]"
|
||||
},
|
||||
"step": {
|
||||
"init": {
|
||||
"data": {
|
||||
@@ -35,6 +44,10 @@
|
||||
"missing_include_only_feed_id": {
|
||||
"title": "No feed synchronized with the {domain} sensor",
|
||||
"description": "Configuring {domain} using YAML is being removed.\n\nPlease add manually the feeds you want to synchronize with the `configure` button of the integration."
|
||||
},
|
||||
"migrate_database": {
|
||||
"title": "Upgrade your emoncms version",
|
||||
"description": "Your [emoncms]({url}) does not ship a unique identifier.\n\n Please upgrade to at least version 11.5.7 and migrate your emoncms database.\n\n More info on [emoncms documentation]({doc_url})"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@@ -6,5 +6,5 @@
"iot_class": "local_push",
"loggers": ["sense_energy"],
"quality_scale": "internal",
"requirements": ["sense-energy==0.13.2"]
"requirements": ["sense-energy==0.13.3"]
}
@@ -331,7 +331,7 @@ class EnergyManager:
|
||||
"device_consumption",
|
||||
):
|
||||
if key in update:
|
||||
data[key] = update[key] # type: ignore[literal-required]
|
||||
data[key] = update[key]
|
||||
|
||||
self.data = data
|
||||
self._store.async_delay_save(lambda: data, 60)
|
||||
|
@@ -16,7 +16,7 @@ from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlowWithConfigEntry,
|
||||
OptionsFlow,
|
||||
)
|
||||
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -66,9 +66,11 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> EnvoyOptionsFlowHandler:
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> EnvoyOptionsFlowHandler:
|
||||
"""Options flow handler for Enphase_Envoy."""
|
||||
return EnvoyOptionsFlowHandler(config_entry)
|
||||
return EnvoyOptionsFlowHandler()
|
||||
|
||||
@callback
|
||||
def _async_generate_schema(self) -> vol.Schema:
|
||||
@@ -288,7 +290,7 @@ class EnphaseConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
|
||||
class EnvoyOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class EnvoyOptionsFlowHandler(OptionsFlow):
|
||||
"""Envoy config flow options handler."""
|
||||
|
||||
async def async_step_init(
|
||||
|
@@ -15,7 +15,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_send
|
||||
|
||||
from .const import DOMAIN, SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED
|
||||
from .const import SIGNAL_THERMOSTAT_CONNECTED, SIGNAL_THERMOSTAT_DISCONNECTED
|
||||
from .models import Eq3Config, Eq3ConfigEntryData
|
||||
|
||||
PLATFORMS = [
|
||||
@@ -25,7 +25,10 @@ PLATFORMS = [
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
type Eq3ConfigEntry = ConfigEntry[Eq3ConfigEntryData]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
|
||||
"""Handle config entry setup."""
|
||||
|
||||
mac_address: str | None = entry.unique_id
|
||||
@@ -53,12 +56,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
ble_device=device,
|
||||
)
|
||||
|
||||
eq3_config_entry = Eq3ConfigEntryData(eq3_config=eq3_config, thermostat=thermostat)
|
||||
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = eq3_config_entry
|
||||
|
||||
entry.runtime_data = Eq3ConfigEntryData(
|
||||
eq3_config=eq3_config, thermostat=thermostat
|
||||
)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
entry.async_create_background_task(
|
||||
hass, _async_run_thermostat(hass, entry), entry.entry_id
|
||||
)
|
||||
@@ -66,29 +68,27 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: Eq3ConfigEntry) -> bool:
|
||||
"""Handle config entry unload."""
|
||||
|
||||
if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS):
|
||||
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN].pop(entry.entry_id)
|
||||
await eq3_config_entry.thermostat.async_disconnect()
|
||||
await entry.runtime_data.thermostat.async_disconnect()
|
||||
|
||||
return unload_ok
|
||||
|
||||
|
||||
async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
async def update_listener(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None:
|
||||
"""Handle config entry update."""
|
||||
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
|
||||
|
||||
async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
async def _async_run_thermostat(hass: HomeAssistant, entry: Eq3ConfigEntry) -> None:
|
||||
"""Run the thermostat."""
|
||||
|
||||
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id]
|
||||
thermostat = eq3_config_entry.thermostat
|
||||
mac_address = eq3_config_entry.eq3_config.mac_address
|
||||
scan_interval = eq3_config_entry.eq3_config.scan_interval
|
||||
thermostat = entry.runtime_data.thermostat
|
||||
mac_address = entry.runtime_data.eq3_config.mac_address
|
||||
scan_interval = entry.runtime_data.eq3_config.scan_interval
|
||||
|
||||
await _async_reconnect_thermostat(hass, entry)
|
||||
|
||||
@@ -117,13 +117,14 @@ async def _async_run_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None
|
||||
await asyncio.sleep(scan_interval)
|
||||
|
||||
|
||||
async def _async_reconnect_thermostat(hass: HomeAssistant, entry: ConfigEntry) -> None:
|
||||
async def _async_reconnect_thermostat(
|
||||
hass: HomeAssistant, entry: Eq3ConfigEntry
|
||||
) -> None:
|
||||
"""Reconnect the thermostat."""
|
||||
|
||||
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][entry.entry_id]
|
||||
thermostat = eq3_config_entry.thermostat
|
||||
mac_address = eq3_config_entry.eq3_config.mac_address
|
||||
scan_interval = eq3_config_entry.eq3_config.scan_interval
|
||||
thermostat = entry.runtime_data.thermostat
|
||||
mac_address = entry.runtime_data.eq3_config.mac_address
|
||||
scan_interval = entry.runtime_data.eq3_config.scan_interval
|
||||
|
||||
while True:
|
||||
try:
|
||||
|
@@ -3,7 +3,6 @@
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from eq3btsmart import Thermostat
|
||||
from eq3btsmart.const import EQ3BT_MAX_TEMP, EQ3BT_OFF_TEMP, Eq3Preset, OperationMode
|
||||
from eq3btsmart.exceptions import Eq3Exception
|
||||
|
||||
@@ -15,45 +14,35 @@ from homeassistant.components.climate import (
|
||||
HVACAction,
|
||||
HVACMode,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceInfo
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import (
|
||||
DEVICE_MODEL,
|
||||
DOMAIN,
|
||||
EQ_TO_HA_HVAC,
|
||||
HA_TO_EQ_HVAC,
|
||||
MANUFACTURER,
|
||||
SIGNAL_THERMOSTAT_CONNECTED,
|
||||
SIGNAL_THERMOSTAT_DISCONNECTED,
|
||||
CurrentTemperatureSelector,
|
||||
Preset,
|
||||
TargetTemperatureSelector,
|
||||
)
|
||||
from .entity import Eq3Entity
|
||||
from .models import Eq3Config, Eq3ConfigEntryData
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
entry: Eq3ConfigEntry,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
) -> None:
|
||||
"""Handle config entry setup."""
|
||||
|
||||
eq3_config_entry: Eq3ConfigEntryData = hass.data[DOMAIN][config_entry.entry_id]
|
||||
|
||||
async_add_entities(
|
||||
[Eq3Climate(eq3_config_entry.eq3_config, eq3_config_entry.thermostat)],
|
||||
[Eq3Climate(entry)],
|
||||
)
|
||||
|
||||
|
||||
@@ -80,53 +69,6 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
_attr_preset_mode: str | None = None
|
||||
_target_temperature: float | None = None
|
||||
|
||||
def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None:
|
||||
"""Initialize the climate entity."""
|
||||
|
||||
super().__init__(eq3_config, thermostat)
|
||||
self._attr_unique_id = dr.format_mac(eq3_config.mac_address)
|
||||
self._attr_device_info = DeviceInfo(
|
||||
name=slugify(self._eq3_config.mac_address),
|
||||
manufacturer=MANUFACTURER,
|
||||
model=DEVICE_MODEL,
|
||||
connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
|
||||
self._thermostat.register_update_callback(self._async_on_updated)
|
||||
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}",
|
||||
self._async_on_disconnected,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}",
|
||||
self._async_on_connected,
|
||||
)
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
|
||||
self._thermostat.unregister_update_callback(self._async_on_updated)
|
||||
|
||||
@callback
|
||||
def _async_on_disconnected(self) -> None:
|
||||
self._attr_available = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _async_on_connected(self) -> None:
|
||||
self._attr_available = True
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _async_on_updated(self) -> None:
|
||||
"""Handle updated data from the thermostat."""
|
||||
@@ -137,12 +79,15 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
if self._thermostat.device_data is not None:
|
||||
self._async_on_device_updated()
|
||||
|
||||
self.async_write_ha_state()
|
||||
super()._async_on_updated()
|
||||
|
||||
@callback
|
||||
def _async_on_status_updated(self) -> None:
|
||||
"""Handle updated status from the thermostat."""
|
||||
|
||||
if self._thermostat.status is None:
|
||||
return
|
||||
|
||||
self._target_temperature = self._thermostat.status.target_temperature.value
|
||||
self._attr_hvac_mode = EQ_TO_HA_HVAC[self._thermostat.status.operation_mode]
|
||||
self._attr_current_temperature = self._get_current_temperature()
|
||||
@@ -154,13 +99,16 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
def _async_on_device_updated(self) -> None:
|
||||
"""Handle updated device data from the thermostat."""
|
||||
|
||||
if self._thermostat.device_data is None:
|
||||
return
|
||||
|
||||
device_registry = dr.async_get(self.hass)
|
||||
if device := device_registry.async_get_device(
|
||||
connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
|
||||
):
|
||||
device_registry.async_update_device(
|
||||
device.id,
|
||||
sw_version=self._thermostat.device_data.firmware_version,
|
||||
sw_version=str(self._thermostat.device_data.firmware_version),
|
||||
serial_number=self._thermostat.device_data.device_serial.value,
|
||||
)
|
||||
|
||||
@@ -265,7 +213,7 @@ class Eq3Climate(Eq3Entity, ClimateEntity):
|
||||
self.async_write_ha_state()
|
||||
|
||||
try:
|
||||
await self._thermostat.async_set_temperature(self._target_temperature)
|
||||
await self._thermostat.async_set_temperature(temperature)
|
||||
except Eq3Exception:
|
||||
_LOGGER.error(
|
||||
"[%s] Failed setting temperature", self._eq3_config.mac_address
|
||||
|
@@ -20,7 +20,6 @@ DEVICE_MODEL = "CC-RT-BLE-EQ"
|
||||
|
||||
GET_DEVICE_TIMEOUT = 5 # seconds
|
||||
|
||||
|
||||
EQ_TO_HA_HVAC: dict[OperationMode, HVACMode] = {
|
||||
OperationMode.OFF: HVACMode.OFF,
|
||||
OperationMode.ON: HVACMode.HEAT,
|
||||
|
@@ -1,10 +1,22 @@
|
||||
"""Base class for all eQ-3 entities."""
|
||||
|
||||
from eq3btsmart.thermostat import Thermostat
|
||||
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.device_registry import (
|
||||
CONNECTION_BLUETOOTH,
|
||||
DeviceInfo,
|
||||
format_mac,
|
||||
)
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .models import Eq3Config
|
||||
from . import Eq3ConfigEntry
|
||||
from .const import (
|
||||
DEVICE_MODEL,
|
||||
MANUFACTURER,
|
||||
SIGNAL_THERMOSTAT_CONNECTED,
|
||||
SIGNAL_THERMOSTAT_DISCONNECTED,
|
||||
)
|
||||
|
||||
|
||||
class Eq3Entity(Entity):
|
||||
@@ -12,8 +24,60 @@ class Eq3Entity(Entity):
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, eq3_config: Eq3Config, thermostat: Thermostat) -> None:
|
||||
def __init__(self, entry: Eq3ConfigEntry, unique_id_key: str | None = None) -> None:
|
||||
"""Initialize the eq3 entity."""
|
||||
|
||||
self._eq3_config = eq3_config
|
||||
self._thermostat = thermostat
|
||||
self._eq3_config = entry.runtime_data.eq3_config
|
||||
self._thermostat = entry.runtime_data.thermostat
|
||||
self._attr_device_info = DeviceInfo(
|
||||
name=slugify(self._eq3_config.mac_address),
|
||||
manufacturer=MANUFACTURER,
|
||||
model=DEVICE_MODEL,
|
||||
connections={(CONNECTION_BLUETOOTH, self._eq3_config.mac_address)},
|
||||
)
|
||||
suffix = f"_{unique_id_key}" if unique_id_key else ""
|
||||
self._attr_unique_id = f"{format_mac(self._eq3_config.mac_address)}{suffix}"
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Run when entity about to be added to hass."""
|
||||
|
||||
self._thermostat.register_update_callback(self._async_on_updated)
|
||||
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{SIGNAL_THERMOSTAT_DISCONNECTED}_{self._eq3_config.mac_address}",
|
||||
self._async_on_disconnected,
|
||||
)
|
||||
)
|
||||
self.async_on_remove(
|
||||
async_dispatcher_connect(
|
||||
self.hass,
|
||||
f"{SIGNAL_THERMOSTAT_CONNECTED}_{self._eq3_config.mac_address}",
|
||||
self._async_on_connected,
|
||||
)
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Run when entity will be removed from hass."""
|
||||
|
||||
self._thermostat.unregister_update_callback(self._async_on_updated)
|
||||
|
||||
def _async_on_updated(self) -> None:
|
||||
"""Handle updated data from the thermostat."""
|
||||
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _async_on_disconnected(self) -> None:
|
||||
"""Handle disconnection from the thermostat."""
|
||||
|
||||
self._attr_available = False
|
||||
self.async_write_ha_state()
|
||||
|
||||
@callback
|
||||
def _async_on_connected(self) -> None:
|
||||
"""Handle connection to the thermostat."""
|
||||
|
||||
self._attr_available = True
|
||||
self.async_write_ha_state()
|
||||
|
@@ -23,5 +23,5 @@
"iot_class": "local_polling",
"loggers": ["eq3btsmart"],
"quality_scale": "silver",
"requirements": ["eq3btsmart==1.2.0", "bleak-esphome==1.1.0"]
"requirements": ["eq3btsmart==1.2.1", "bleak-esphome==1.1.0"]
}
@@ -257,6 +257,9 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
self, discovery_info: MqttServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle MQTT discovery."""
|
||||
if not discovery_info.payload:
|
||||
return self.async_abort(reason="mqtt_missing_payload")
|
||||
|
||||
device_info = json_loads_object(discovery_info.payload)
|
||||
if "mac" not in device_info:
|
||||
return self.async_abort(reason="mqtt_missing_mac")
|
||||
@@ -482,16 +485,12 @@ class EsphomeFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return OptionsFlowHandler(config_entry)
|
||||
return OptionsFlowHandler()
|
||||
|
||||
|
||||
class OptionsFlowHandler(OptionsFlow):
|
||||
"""Handle a option flow for esphome."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
@@ -31,6 +31,7 @@ class ESPHomeDashboardCoordinator(DataUpdateCoordinator[dict[str, ConfiguredDevi
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=None,
|
||||
name="ESPHome Dashboard",
|
||||
update_interval=timedelta(minutes=5),
|
||||
always_update=False,
|
||||
|
@@ -8,7 +8,8 @@
|
||||
"service_received": "Action received",
|
||||
"mqtt_missing_mac": "Missing MAC address in MQTT properties.",
|
||||
"mqtt_missing_api": "Missing API port in MQTT properties.",
|
||||
"mqtt_missing_ip": "Missing IP address in MQTT properties."
|
||||
"mqtt_missing_ip": "Missing IP address in MQTT properties.",
|
||||
"mqtt_missing_payload": "Missing MQTT Payload."
|
||||
},
|
||||
"error": {
|
||||
"resolve_error": "Can't resolve address of the ESP. If this error persists, please set a static IP address",
|
||||
|
@@ -240,6 +240,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
coordinator = DataUpdateCoordinator(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=None,
|
||||
name=f"{DOMAIN}_coordinator",
|
||||
update_interval=config[DOMAIN][CONF_SCAN_INTERVAL],
|
||||
update_method=broker.async_update,
|
||||
|
@@ -150,7 +150,7 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> EzvizOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return EzvizOptionsFlowHandler(config_entry)
|
||||
return EzvizOptionsFlowHandler()
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -391,10 +391,6 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
class EzvizOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle EZVIZ client options."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
@@ -73,11 +73,9 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity):
|
||||
return self.data["version"]
|
||||
|
||||
@property
|
||||
def in_progress(self) -> bool | int | None:
|
||||
def in_progress(self) -> bool:
|
||||
"""Update installation progress."""
|
||||
if self.data["upgrade_in_progress"]:
|
||||
return self.data["upgrade_percent"]
|
||||
return False
|
||||
return bool(self.data["upgrade_in_progress"])
|
||||
|
||||
@property
|
||||
def latest_version(self) -> str | None:
|
||||
@@ -93,6 +91,13 @@ class EzvizUpdateEntity(EzvizEntity, UpdateEntity):
|
||||
return self.data["latest_firmware_info"].get("desc")
|
||||
return None
|
||||
|
||||
@property
|
||||
def update_percentage(self) -> int | None:
|
||||
"""Update installation progress."""
|
||||
if self.data["upgrade_in_progress"]:
|
||||
return self.data["upgrade_percent"]
|
||||
return None
|
||||
|
||||
async def async_install(
|
||||
self, version: str | None, backup: bool, **kwargs: Any
|
||||
) -> None:
|
||||
|
@@ -15,7 +15,6 @@ from homeassistant.config_entries import (
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
OptionsFlowWithConfigEntry,
|
||||
)
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
@@ -46,9 +45,11 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> OptionsFlow:
|
||||
"""Get the options flow for this handler."""
|
||||
return FeedReaderOptionsFlowHandler(config_entry)
|
||||
return FeedReaderOptionsFlowHandler()
|
||||
|
||||
def show_user_form(
|
||||
self,
|
||||
@@ -147,7 +148,7 @@ class FeedReaderConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_abort(reason="reconfigure_successful")
|
||||
|
||||
|
||||
class FeedReaderOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class FeedReaderOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle an options flow."""
|
||||
|
||||
async def async_step_init(
|
||||
@@ -162,7 +163,9 @@ class FeedReaderOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_MAX_ENTRIES,
|
||||
default=self.options.get(CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES),
|
||||
default=self.config_entry.options.get(
|
||||
CONF_MAX_ENTRIES, DEFAULT_MAX_ENTRIES
|
||||
),
|
||||
): cv.positive_int,
|
||||
}
|
||||
)
|
||||
|
@@ -4,5 +4,5 @@
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/ffmpeg",
"integration_type": "system",
"requirements": ["ha-ffmpeg==3.2.1"]
"requirements": ["ha-ffmpeg==3.2.2"]
}
@@ -3,88 +3,16 @@
|
||||
from copy import deepcopy
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.notify import migrate_notify_issue
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_FILE_PATH,
|
||||
CONF_NAME,
|
||||
CONF_PLATFORM,
|
||||
CONF_SCAN_INTERVAL,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_FILE_PATH, CONF_NAME, CONF_PLATFORM, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.helpers import (
|
||||
config_validation as cv,
|
||||
discovery,
|
||||
issue_registry as ir,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import DOMAIN
|
||||
from .notify import PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA
|
||||
from .sensor import PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA
|
||||
|
||||
IMPORT_SCHEMA = {
|
||||
Platform.SENSOR: SENSOR_PLATFORM_SCHEMA,
|
||||
Platform.NOTIFY: NOTIFY_PLATFORM_SCHEMA,
|
||||
}
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the file integration."""
|
||||
|
||||
hass.data[DOMAIN] = config
|
||||
if hass.config_entries.async_entries(DOMAIN):
|
||||
# We skip import in case we already have config entries
|
||||
return True
|
||||
# The use of the legacy notify service was deprecated with HA Core 2024.6.0
|
||||
# and will be removed with HA Core 2024.12
|
||||
migrate_notify_issue(hass, DOMAIN, "File", "2024.12.0")
|
||||
# The YAML config was imported with HA Core 2024.6.0 and will be removed with
|
||||
# HA Core 2024.12
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
breaks_in_ha_version="2024.12.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
learn_more_url="https://www.home-assistant.io/integrations/file/",
|
||||
severity=ir.IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "File",
|
||||
},
|
||||
)
|
||||
|
||||
# Import the YAML config into separate config entries
|
||||
platforms_config: dict[Platform, list[ConfigType]] = {
|
||||
domain: config[domain] for domain in PLATFORMS if domain in config
|
||||
}
|
||||
for domain, items in platforms_config.items():
|
||||
for item in items:
|
||||
if item[CONF_PLATFORM] == DOMAIN:
|
||||
file_config_item = IMPORT_SCHEMA[domain](item)
|
||||
file_config_item[CONF_PLATFORM] = domain
|
||||
if CONF_SCAN_INTERVAL in file_config_item:
|
||||
del file_config_item[CONF_SCAN_INTERVAL]
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN,
|
||||
context={"source": SOURCE_IMPORT},
|
||||
data=file_config_item,
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
"""Set up a file component entry."""
|
||||
config = {**entry.data, **entry.options}
|
||||
@@ -102,20 +30,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
|
||||
entry, [Platform(entry.data[CONF_PLATFORM])]
|
||||
)
|
||||
entry.async_on_unload(entry.add_update_listener(update_listener))
|
||||
if entry.data[CONF_PLATFORM] == Platform.NOTIFY and CONF_NAME in entry.data:
|
||||
# New notify entities are being setup through the config entry,
|
||||
# but during the deprecation period we want to keep the legacy notify platform,
|
||||
# so we forward the setup config through discovery.
|
||||
# Only the entities from yaml will still be available as legacy service.
|
||||
hass.async_create_task(
|
||||
discovery.async_load_platform(
|
||||
hass,
|
||||
Platform.NOTIFY,
|
||||
DOMAIN,
|
||||
config,
|
||||
hass.data[DOMAIN],
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
@@ -1,7 +1,8 @@
|
||||
"""Config flow for file integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from copy import deepcopy
|
||||
import os
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
@@ -11,11 +12,9 @@ from homeassistant.config_entries import (
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
OptionsFlowWithConfigEntry,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_FILE_PATH,
|
||||
CONF_FILENAME,
|
||||
CONF_NAME,
|
||||
CONF_PLATFORM,
|
||||
CONF_UNIT_OF_MEASUREMENT,
|
||||
@@ -74,9 +73,11 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> FileOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return FileOptionsFlowHandler(config_entry)
|
||||
return FileOptionsFlowHandler()
|
||||
|
||||
async def validate_file_path(self, file_path: str) -> bool:
|
||||
"""Ensure the file path is valid."""
|
||||
@@ -129,29 +130,8 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle file sensor config flow."""
|
||||
return await self._async_handle_step(Platform.SENSOR.value, user_input)
|
||||
|
||||
async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Import `file`` config from configuration.yaml."""
|
||||
self._async_abort_entries_match(import_data)
|
||||
platform = import_data[CONF_PLATFORM]
|
||||
name: str = import_data.get(CONF_NAME, DEFAULT_NAME)
|
||||
file_name: str
|
||||
if platform == Platform.NOTIFY:
|
||||
file_name = import_data.pop(CONF_FILENAME)
|
||||
file_path: str = os.path.join(self.hass.config.config_dir, file_name)
|
||||
import_data[CONF_FILE_PATH] = file_path
|
||||
else:
|
||||
file_path = import_data[CONF_FILE_PATH]
|
||||
title = f"{name} [{file_path}]"
|
||||
data = deepcopy(import_data)
|
||||
options = {}
|
||||
for key, value in import_data.items():
|
||||
if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME):
|
||||
data.pop(key)
|
||||
options[key] = value
|
||||
return self.async_create_entry(title=title, data=data, options=options)
|
||||
|
||||
|
||||
class FileOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class FileOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle File options."""
|
||||
|
||||
async def async_step_init(
|
||||
|
@@ -2,104 +2,23 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from functools import partial
|
||||
import logging
|
||||
import os
|
||||
from typing import Any, TextIO
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.notify import (
|
||||
ATTR_TITLE,
|
||||
ATTR_TITLE_DEFAULT,
|
||||
PLATFORM_SCHEMA as NOTIFY_PLATFORM_SCHEMA,
|
||||
BaseNotificationService,
|
||||
NotifyEntity,
|
||||
NotifyEntityFeature,
|
||||
migrate_notify_issue,
|
||||
)
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_FILE_PATH, CONF_FILENAME, CONF_NAME
|
||||
from homeassistant.const import CONF_FILE_PATH, CONF_NAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
import homeassistant.util.dt as dt_util
|
||||
|
||||
from .const import CONF_TIMESTAMP, DEFAULT_NAME, DOMAIN, FILE_ICON
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# The legacy platform schema uses a filename, after import
|
||||
# The full file path is stored in the config entry
|
||||
PLATFORM_SCHEMA = NOTIFY_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_FILENAME): cv.string,
|
||||
vol.Optional(CONF_TIMESTAMP, default=False): cv.boolean,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_get_service(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> FileNotificationService | None:
|
||||
"""Get the file notification service."""
|
||||
if discovery_info is None:
|
||||
# We only set up through discovery
|
||||
return None
|
||||
file_path: str = discovery_info[CONF_FILE_PATH]
|
||||
timestamp: bool = discovery_info[CONF_TIMESTAMP]
|
||||
|
||||
return FileNotificationService(file_path, timestamp)
|
||||
|
||||
|
||||
class FileNotificationService(BaseNotificationService):
|
||||
"""Implement the notification service for the File service."""
|
||||
|
||||
def __init__(self, file_path: str, add_timestamp: bool) -> None:
|
||||
"""Initialize the service."""
|
||||
self._file_path = file_path
|
||||
self.add_timestamp = add_timestamp
|
||||
|
||||
async def async_send_message(self, message: str = "", **kwargs: Any) -> None:
|
||||
"""Send a message to a file."""
|
||||
# The use of the legacy notify service was deprecated with HA Core 2024.6.0
|
||||
# and will be removed with HA Core 2024.12
|
||||
migrate_notify_issue(
|
||||
self.hass, DOMAIN, "File", "2024.12.0", service_name=self._service_name
|
||||
)
|
||||
await self.hass.async_add_executor_job(
|
||||
partial(self.send_message, message, **kwargs)
|
||||
)
|
||||
|
||||
def send_message(self, message: str = "", **kwargs: Any) -> None:
|
||||
"""Send a message to a file."""
|
||||
file: TextIO
|
||||
filepath = self._file_path
|
||||
try:
|
||||
with open(filepath, "a", encoding="utf8") as file:
|
||||
if os.stat(filepath).st_size == 0:
|
||||
title = (
|
||||
f"{kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)} notifications (Log"
|
||||
f" started: {dt_util.utcnow().isoformat()})\n{'-' * 80}\n"
|
||||
)
|
||||
file.write(title)
|
||||
|
||||
if self.add_timestamp:
|
||||
text = f"{dt_util.utcnow().isoformat()} {message}\n"
|
||||
else:
|
||||
text = f"{message}\n"
|
||||
file.write(text)
|
||||
except OSError as exc:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="write_access_failed",
|
||||
translation_placeholders={"filename": filepath, "exc": f"{exc!r}"},
|
||||
) from exc
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
|
@@ -6,12 +6,8 @@ import logging
|
||||
import os
|
||||
|
||||
from file_read_backwards import FileReadBackwards
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA,
|
||||
SensorEntity,
|
||||
)
|
||||
from homeassistant.components.sensor import SensorEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import (
|
||||
CONF_FILE_PATH,
|
||||
@@ -20,38 +16,13 @@ from homeassistant.const import (
|
||||
CONF_VALUE_TEMPLATE,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.entity_platform import AddEntitiesCallback
|
||||
from homeassistant.helpers.template import Template
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import DEFAULT_NAME, FILE_ICON
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_FILE_PATH): cv.isfile,
|
||||
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
|
||||
vol.Optional(CONF_VALUE_TEMPLATE): cv.string,
|
||||
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_platform(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_add_entities: AddEntitiesCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> None:
|
||||
"""Set up the file sensor from YAML.
|
||||
|
||||
The YAML platform config is automatically
|
||||
imported to a config entry, this method can be removed
|
||||
when YAML support is removed.
|
||||
"""
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
|
@@ -71,9 +71,11 @@ class FluxLedConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> FluxLedOptionsFlow:
|
||||
"""Get the options flow for the Flux LED component."""
|
||||
return FluxLedOptionsFlow(config_entry)
|
||||
return FluxLedOptionsFlow()
|
||||
|
||||
async def async_step_dhcp(
|
||||
self, discovery_info: dhcp.DhcpServiceInfo
|
||||
@@ -320,10 +322,6 @@ class FluxLedConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
class FluxLedOptionsFlow(OptionsFlow):
|
||||
"""Handle flux_led options."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize the flux_led options flow."""
|
||||
self._config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -332,7 +330,7 @@ class FluxLedOptionsFlow(OptionsFlow):
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(title="", data=user_input)
|
||||
|
||||
options = self._config_entry.options
|
||||
options = self.config_entry.options
|
||||
options_schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
|
@@ -41,7 +41,7 @@ class ForecastSolarFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> ForecastSolarOptionFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return ForecastSolarOptionFlowHandler(config_entry)
|
||||
return ForecastSolarOptionFlowHandler()
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -91,10 +91,6 @@ class ForecastSolarFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
class ForecastSolarOptionFlowHandler(OptionsFlow):
|
||||
"""Handle options."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize options flow."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
|
@@ -52,10 +52,6 @@ TEST_CONNECTION_ERROR_DICT = {
|
||||
class ForkedDaapdOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle a forked-daapd options flow."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
@@ -122,7 +118,7 @@ class ForkedDaapdFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> ForkedDaapdOptionsFlowHandler:
|
||||
"""Return options flow handler."""
|
||||
return ForkedDaapdOptionsFlowHandler(config_entry)
|
||||
return ForkedDaapdOptionsFlowHandler()
|
||||
|
||||
async def validate_input(self, user_input):
|
||||
"""Validate the user input."""
|
||||
|
@@ -23,7 +23,6 @@ from homeassistant.config_entries import (
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
OptionsFlow,
|
||||
OptionsFlowWithConfigEntry,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_HOST,
|
||||
@@ -60,9 +59,11 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
|
||||
@staticmethod
|
||||
@callback
|
||||
def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow:
|
||||
def async_get_options_flow(
|
||||
config_entry: ConfigEntry,
|
||||
) -> FritzBoxToolsOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return FritzBoxToolsOptionsFlowHandler(config_entry)
|
||||
return FritzBoxToolsOptionsFlowHandler()
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize FRITZ!Box Tools flow."""
|
||||
@@ -393,7 +394,7 @@ class FritzBoxToolsFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
)
|
||||
|
||||
|
||||
class FritzBoxToolsOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
class FritzBoxToolsOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle an options flow."""
|
||||
|
||||
async def async_step_init(
|
||||
@@ -404,19 +405,18 @@ class FritzBoxToolsOptionsFlowHandler(OptionsFlowWithConfigEntry):
|
||||
if user_input is not None:
|
||||
return self.async_create_entry(title="", data=user_input)
|
||||
|
||||
options = self.config_entry.options
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_CONSIDER_HOME,
|
||||
default=self.options.get(
|
||||
default=options.get(
|
||||
CONF_CONSIDER_HOME, DEFAULT_CONSIDER_HOME.total_seconds()
|
||||
),
|
||||
): vol.All(vol.Coerce(int), vol.Clamp(min=0, max=900)),
|
||||
vol.Optional(
|
||||
CONF_OLD_DISCOVERY,
|
||||
default=self.options.get(
|
||||
CONF_OLD_DISCOVERY, DEFAULT_CONF_OLD_DISCOVERY
|
||||
),
|
||||
default=options.get(CONF_OLD_DISCOVERY, DEFAULT_CONF_OLD_DISCOVERY),
|
||||
): bool,
|
||||
}
|
||||
)
|
||||
|
@@ -1,7 +1,7 @@
{
"domain": "fritz",
"name": "AVM FRITZ!Box Tools",
"codeowners": ["@mammuth", "@AaronDavidSchneider", "@chemelli74", "@mib1185"],
"codeowners": ["@AaronDavidSchneider", "@chemelli74", "@mib1185"],
"config_flow": true,
"dependencies": ["network"],
"documentation": "https://www.home-assistant.io/integrations/fritz",
@@ -141,7 +141,7 @@ class FritzBoxCallMonitorConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
config_entry: ConfigEntry,
|
||||
) -> FritzBoxCallMonitorOptionsFlowHandler:
|
||||
"""Get the options flow for this handler."""
|
||||
return FritzBoxCallMonitorOptionsFlowHandler(config_entry)
|
||||
return FritzBoxCallMonitorOptionsFlowHandler()
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
@@ -278,10 +278,6 @@ class FritzBoxCallMonitorConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
class FritzBoxCallMonitorOptionsFlowHandler(OptionsFlow):
|
||||
"""Handle a fritzbox_callmonitor options flow."""
|
||||
|
||||
def __init__(self, config_entry: ConfigEntry) -> None:
|
||||
"""Initialize."""
|
||||
self.config_entry = config_entry
|
||||
|
||||
@classmethod
|
||||
def _are_prefixes_valid(cls, prefixes: str | None) -> bool:
|
||||
"""Check if prefixes are valid."""
|
||||
|
Some files were not shown because too many files have changed in this diff.