Mirror of https://github.com/home-assistant/core.git, synced 2025-12-18 13:58:01 +00:00
Compare commits: 2025.12.3...knx-data-s (250 commits)
| SHA1 |
|---|
| 363997cf5d |
| 0cfe0ed709 |
| ea3c9e2520 |
| c0863ca585 |
| 9d53d37cbf |
| 823f320425 |
| b5a8516bd6 |
| f05cb6b2c7 |
| 1a60c46d67 |
| 62fba5ca20 |
| b54cde795c |
| 0f456373bf |
| a5042027b8 |
| b15b5ba95c |
| cd6e72798e |
| 739157e59f |
| 267aa1af42 |
| 7328b61a69 |
| 203f2fb364 |
| b956c17ce4 |
| 5163dc0567 |
| 31a0478717 |
| 24da3f0db8 |
| 786922fc5d |
| c2f8b6986b |
| 0a0832671f |
| 7b353d7ad4 |
| 99de73a729 |
| 1995fbd252 |
| 315ea9dc76 |
| 639a96f8cb |
| b6786c5a42 |
| 6f6e9b8057 |
| e0c687e415 |
| 982362110c |
| 90dc3a8fdf |
| 5112742b71 |
| 8899bc01bd |
| ed8f9105ff |
| 185de98f5e |
| e857abb43f |
| 5b1829f3a1 |
| 520156a33a |
| e3b5342b76 |
| 951b19e80c |
| e2351ecec2 |
| d75e5498c6 |
| 2dd58dbe39 |
| 4ef17799db |
| 9373378350 |
| 18833a194b |
| 2631c77bee |
| c67247bf32 |
| 18b5ffd365 |
| c4e3a4d65e |
| 84d2686517 |
| ae8980ce5b |
| b2d4c9ecb4 |
| f5b046ee7d |
| 55c5fb7374 |
| 5d78cd328a |
| bc36578ada |
| e63242e465 |
| e84c09745d |
| f07991d0ba |
| 872fef1f6f |
| c866dc973c |
| e2acf30637 |
| 29631a2c5a |
| 1d31e6d0ea |
| 8109d9a39c |
| e1abd451b8 |
| 2c72cd94f2 |
| 3bccb4b89c |
| 6d4fb30630 |
| c04411f1bc |
| 753ea023de |
| 1ca1cf59eb |
| 5b01bb1a29 |
| 15c89d24eb |
| b26b2347e6 |
| 7d54103c09 |
| c705a1dc4b |
| 998bd23446 |
| 3a1a58d6ad |
| f9219dd841 |
| 402ed7e0f3 |
| 7a1a5df89e |
| df558fc1e7 |
| ec66407ef1 |
| 6b99234a43 |
| 393be71009 |
| 12bc1687ec |
| c59b322c0a |
| e00266463d |
| cbc8a33553 |
| 28582f75d4 |
| 39cccd212d |
| 329ea33337 |
| 521733c420 |
| 33e9f9a0ff |
| 5fda2bccbe |
| ae75332656 |
| b171785f96 |
| ff3d6783c6 |
| b1e579bea0 |
| 87241ea051 |
| a871ec0bdf |
| b8829b645a |
| 5b056a83d4 |
| 02a70123c1 |
| 5f6d2f537a |
| 5e04e9f04d |
| 56515ad7b5 |
| a1fe2bf4fa |
| b8fa8efd91 |
| 03557b5ef2 |
| dafec8ce58 |
| 6ff3f74347 |
| ddd8cf7fde |
| 1356eea52f |
| 6188e0e39b |
| 699fa1617d |
| 449f0fa5a5 |
| 2e008d2bb7 |
| 05dec2619d |
| 25a6778ba8 |
| f564b8cb44 |
| ce6bfdebfc |
| f00a944ac1 |
| 3073a99ce6 |
| 8b04ce1328 |
| 39f76787ab |
| e8acced335 |
| 758a30eebc |
| faf94bea24 |
| ff91c57228 |
| 3d2b506997 |
| d3c1c28605 |
| d4e1f7741d |
| e713632eed |
| 060ad35ddc |
| 6c5dba40cd |
| a04d595424 |
| fe85eaf2a2 |
| 3551c4b01f |
| e7edd51a65 |
| 0c4f2326ef |
| 81f4456d7c |
| 2b608bf15c |
| 972ed4b27f |
| 23c167da1b |
| 34d6938171 |
| 4bb8590076 |
| 5e0923b60d |
| ad48f3c634 |
| 2bdd6854eb |
| 0bf906911c |
| 874d6f5613 |
| 43ba10eebd |
| 64bed19805 |
| 6357067f0f |
| e328ba4045 |
| 332dbddce6 |
| 82d935a819 |
| 4b84998c0c |
| e10c1ebcf6 |
| 0174bad182 |
| d5be623684 |
| d006b044c8 |
| fdd9571623 |
| 4f4c5152b9 |
| b031a082cd |
| a1132195fd |
| 708b3dc8b2 |
| 8ae0216135 |
| 1472281cd5 |
| ceaa71d198 |
| 7f0d0c555a |
| 3b94b2491a |
| 8c8708d5bc |
| ca35102138 |
| 1a1b50ef1a |
| 5a4d51e57a |
| 9e1bc637e2 |
| ab879c07ca |
| 488c97531e |
| 3b52c5df79 |
| 7f4b56104d |
| ab8135ba1a |
| a88599bc09 |
| 45034279c8 |
| 9f3dae6254 |
| ef36d7b1e5 |
| e5346ba017 |
| 68d41d2a48 |
| 00a882c20a |
| 44a6772947 |
| f874ba1355 |
| 4fc125c49a |
| 8c59196e19 |
| 326f7f0559 |
| 11afda8c22 |
| f1ee0e4ac9 |
| 5f522e5afa |
| 4f6624d0aa |
| 70990645a7 |
| 2f7d74ff62 |
| 885667832b |
| 4646929987 |
| 010aea952c |
| 563678dc47 |
| a48f01f213 |
| 08b758b0d2 |
| 4306fbea52 |
| 6f4c479f8f |
| 1d9c06264e |
| d045ecaf13 |
| f7c41e694c |
| 9ee7ed5cdb |
| 83c4e2abc9 |
| a7dbf551a3 |
| 0b2bb9f6bf |
| 0769163b67 |
| 2bb51e1146 |
| d2248d282c |
| 8fe79a88ca |
| 7a328539b2 |
| ec69efee4d |
| dbcde549d4 |
| 988355e138 |
| 7711eac607 |
| 32fe53cceb |
| 3a65d3c0dc |
| 7fe26223ac |
| 7e8496afb2 |
| 2ec5190243 |
| a706db8fdb |
| a00923c48b |
| 7480d59f0f |
| 4c8d9ed401 |
| eef10c59db |
| a1a1f8dd77 |
| c75a5c5151 |
| cdaaa2bd8f |
| bd84dac8fb |
| 42cbeca5b0 |
| ad0a498d10 |
| 973405822b |
| b883d2f519 |
42 .github/workflows/builder.yml (vendored)
@@ -30,7 +30,7 @@ jobs:
architectures: ${{ env.ARCHITECTURES }}
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
@@ -96,7 +96,7 @@ jobs:
os: ubuntu-24.04-arm
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
@@ -190,7 +190,8 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Install Cosign
- &install_cosign
name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.5.3"
@@ -272,7 +273,7 @@ jobs:
- green
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Set build additional args
run: |
@@ -294,7 +295,7 @@ jobs:

# home-assistant/builder doesn't support sha pinning
- name: Build base image
uses: home-assistant/builder@2025.09.0
uses: home-assistant/builder@2025.11.0
with:
args: |
$BUILD_ARGS \
@@ -310,7 +311,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@@ -353,10 +354,7 @@ jobs:
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.2.3"
- *install_cosign

- name: Login to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
@@ -393,7 +391,7 @@ jobs:
# 2025.12.0.dev202511250240 -> tags: 2025.12.0.dev202511250240, dev
- name: Generate Docker metadata
id: meta
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
with:
images: ${{ matrix.registry }}/home-assistant
sep-tags: ","
@@ -407,7 +405,7 @@ jobs:
type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@aa33708b10e362ff993539393ff100fa93ed6a27 # v3.7.1
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.7.1

- name: Copy architecture images to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
@@ -418,19 +416,9 @@ jobs:
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
for arch in $ARCHS; do
echo "Copying ${arch} image to DockerHub..."
for attempt in 1 2 3; do
if docker buildx imagetools create \
--tag "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}" \
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"; then
break
fi
echo "Attempt ${attempt} failed, retrying in 10 seconds..."
sleep 10
if [ "${attempt}" -eq 3 ]; then
echo "Failed after 3 attempts"
exit 1
fi
done
docker buildx imagetools create \
--tag "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}" \
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
done

@@ -476,7 +464,7 @@ jobs:
if: github.repository_owner == 'home-assistant' && needs.init.outputs.publish == 'true'
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
@@ -521,7 +509,7 @@ jobs:
HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
steps:
- name: Checkout repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0

4 .github/workflows/ci.yaml (vendored)
@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 2
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2025.12"
HA_SHORT_VERSION: "2026.1"
DEFAULT_PYTHON: "3.13.9"
ALL_PYTHON_VERSIONS: "['3.13.9', '3.14.0']"
# 10.3 is the oldest supported version
@@ -99,7 +99,7 @@ jobs:
steps:
- &checkout
name: Check out code from GitHub
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |

6 .github/workflows/codeql.yml (vendored)
@@ -21,14 +21,14 @@ jobs:

steps:
- name: Check out code from GitHub
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Initialize CodeQL
uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
uses: github/codeql-action/init@fe4161a26a8629af62121b670040955b330f9af2 # v4.31.6
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
uses: github/codeql-action/analyze@fe4161a26a8629af62121b670040955b330f9af2 # v4.31.6
with:
category: "/language:python"

@@ -231,7 +231,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
with:
model: openai/gpt-4o
system-prompt: |

@@ -57,7 +57,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
with:
model: openai/gpt-4o-mini
system-prompt: |

6 .github/workflows/stale.yml (vendored)
@@ -17,7 +17,7 @@ jobs:
# - No PRs marked as no-stale
# - No issues (-1)
- name: 60 days stale PRs policy
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
with:
repo-token: ${{ secrets.GITHUB_TOKEN }}
days-before-stale: 60
@@ -57,7 +57,7 @@ jobs:
# - No issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: 90 days stale issues
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
with:
repo-token: ${{ steps.token.outputs.token }}
days-before-stale: 90
@@ -87,7 +87,7 @@ jobs:
# - No Issues marked as no-stale or help-wanted
# - No PRs (-1)
- name: Needs more information stale issues policy
uses: actions/stale@5f858e3efba33a5ca4407a664cc011ad407f2008 # v10.1.0
uses: actions/stale@997185467fa4f803885201cee163a9f38240193d # v10.1.1
with:
repo-token: ${{ steps.token.outputs.token }}
only-labels: "needs-more-information"

2 .github/workflows/translations.yml (vendored)
@@ -19,7 +19,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0

4 .github/workflows/wheels.yml (vendored)
@@ -31,7 +31,7 @@ jobs:
steps:
- &checkout
name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
@@ -136,7 +136,7 @@ jobs:
sed -i "/uv/d" requirements_diff.txt

- name: Build wheels
uses: &home-assistant-wheels home-assistant/wheels@6066c17a2a4aafcf7bdfeae01717f63adfcdba98 # 2025.11.0
uses: &home-assistant-wheels home-assistant/wheels@e5742a69d69f0e274e2689c998900c7d19652c21 # 2025.12.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2

7 CODEOWNERS (generated)
@@ -539,6 +539,8 @@ build.json @home-assistant/supervisor
/tests/components/freebox/ @hacf-fr @Quentame
/homeassistant/components/freedompro/ @stefano055415
/tests/components/freedompro/ @stefano055415
/homeassistant/components/fressnapf_tracker/ @eifinger
/tests/components/fressnapf_tracker/ @eifinger
/homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
/tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
/homeassistant/components/fritzbox/ @mib1185 @flabbamann
@@ -1354,8 +1356,8 @@ build.json @home-assistant/supervisor
/tests/components/ring/ @sdb9696
/homeassistant/components/risco/ @OnFreund
/tests/components/risco/ @OnFreund
/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
/tests/components/rituals_perfume_genie/ @milanmeu @frenck @quebulm
/homeassistant/components/rituals_perfume_genie/ @milanmeu @frenck
/tests/components/rituals_perfume_genie/ @milanmeu @frenck
/homeassistant/components/rmvtransport/ @cgtobi
/tests/components/rmvtransport/ @cgtobi
/homeassistant/components/roborock/ @Lash-L @allenporter
@@ -1761,6 +1763,7 @@ build.json @home-assistant/supervisor
/homeassistant/components/vilfo/ @ManneW
/tests/components/vilfo/ @ManneW
/homeassistant/components/vivotek/ @HarlemSquirrel
/tests/components/vivotek/ @HarlemSquirrel
/homeassistant/components/vizio/ @raman325
/tests/components/vizio/ @raman325
/homeassistant/components/vlc_telnet/ @rodripf @MartinHjelmare

@@ -35,25 +35,22 @@ COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv

USER vscode

COPY .python-version ./
RUN uv python install

ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
RUN uv venv $VIRTUAL_ENV
RUN --mount=type=bind,source=.python-version,target=.python-version \
uv python install \
&& uv venv $VIRTUAL_ENV
ENV PATH="$VIRTUAL_ENV/bin:$PATH"

WORKDIR /tmp

# Setup hass-release
RUN git clone --depth 1 https://github.com/home-assistant/hass-release ~/hass-release \
&& uv pip install -e ~/hass-release/

# Install Python dependencies from requirements
COPY requirements.txt ./
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
RUN uv pip install -r requirements.txt
COPY requirements_test.txt requirements_test_pre_commit.txt ./
RUN uv pip install -r requirements_test.txt
RUN --mount=type=bind,source=requirements.txt,target=requirements.txt \
--mount=type=bind,source=homeassistant/package_constraints.txt,target=homeassistant/package_constraints.txt \
--mount=type=bind,source=requirements_test.txt,target=requirements_test.txt \
--mount=type=bind,source=requirements_test_pre_commit.txt,target=requirements_test_pre_commit.txt \
uv pip install -r requirements.txt -r requirements_test.txt

WORKDIR /workspaces

@@ -1000,7 +1000,7 @@ class _WatchPendingSetups:
# We log every LOG_SLOW_STARTUP_INTERVAL until all integrations are done
# once we take over LOG_SLOW_STARTUP_INTERVAL (60s) to start up
_LOGGER.warning(
"Waiting on integrations to complete setup: %s",
"Waiting for integrations to complete setup: %s",
self._setup_started,
)

@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
@@ -174,6 +175,56 @@ class AirobotConfigFlow(BaseConfigFlow, domain=DOMAIN):
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauthentication upon an API authentication error."""
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm reauthentication dialog."""
|
||||
errors: dict[str, str] = {}
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
|
||||
if user_input is not None:
|
||||
# Combine existing data with new password
|
||||
data = {
|
||||
CONF_HOST: reauth_entry.data[CONF_HOST],
|
||||
CONF_USERNAME: reauth_entry.data[CONF_USERNAME],
|
||||
CONF_PASSWORD: user_input[CONF_PASSWORD],
|
||||
}
|
||||
|
||||
try:
|
||||
await validate_input(self.hass, data)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidAuth:
|
||||
errors["base"] = "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PASSWORD): str,
|
||||
}
|
||||
),
|
||||
description_placeholders={
|
||||
"username": reauth_entry.data[CONF_USERNAME],
|
||||
"host": reauth_entry.data[CONF_HOST],
|
||||
},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
|
||||
class CannotConnect(HomeAssistantError):
|
||||
"""Error to indicate we cannot connect."""
|
||||
|
||||
@@ -11,6 +11,7 @@ from pyairobotrest.exceptions import AirobotAuthError, AirobotConnectionError
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
@@ -53,7 +54,15 @@ class AirobotDataUpdateCoordinator(DataUpdateCoordinator[AirobotData]):
|
||||
try:
|
||||
status = await self.client.get_statuses()
|
||||
settings = await self.client.get_settings()
|
||||
except (AirobotAuthError, AirobotConnectionError) as err:
|
||||
raise UpdateFailed(f"Failed to communicate with device: {err}") from err
|
||||
except AirobotAuthError as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="authentication_failed",
|
||||
) from err
|
||||
except AirobotConnectionError as err:
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="connection_failed",
|
||||
) from err
|
||||
|
||||
return AirobotData(status=status, settings=settings)
|
||||
|
||||
@@ -12,6 +12,6 @@
|
||||
"integration_type": "device",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyairobotrest"],
|
||||
"quality_scale": "bronze",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["pyairobotrest==0.1.0"]
|
||||
}
|
||||
|
||||
@@ -34,7 +34,7 @@ rules:
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
reauthentication-flow: done
|
||||
test-coverage: done
|
||||
|
||||
# Gold
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
@@ -14,15 +15,24 @@
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "The thermostat password."
|
||||
"password": "[%key:component::airobot::config::step::user::data_description::password%]"
|
||||
},
|
||||
"description": "Airobot thermostat {device_id} discovered at {host}. Enter the password to complete setup. Find the password in the thermostat settings menu under Connectivity → Mobile app."
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"password": "[%key:common::config_flow::data::password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"password": "[%key:component::airobot::config::step::user::data_description::password%]"
|
||||
},
|
||||
"description": "The authentication for Airobot thermostat at {host} (Device ID: {username}) has expired. Please enter the password to reauthenticate. Find the password in the thermostat settings menu under Connectivity → Mobile app."
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"host": "[%key:common::config_flow::data::host%]",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
"username": "Device ID"
|
||||
},
|
||||
"data_description": {
|
||||
"host": "The hostname or IP address of your Airobot thermostat.",
|
||||
@@ -34,6 +44,12 @@
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"authentication_failed": {
|
||||
"message": "Authentication failed, please reauthenticate."
|
||||
},
|
||||
"connection_failed": {
|
||||
"message": "Failed to communicate with device."
|
||||
},
|
||||
"set_preset_mode_failed": {
|
||||
"message": "Failed to set preset mode to {preset_mode}."
|
||||
},
|
||||
|
||||
@@ -30,7 +30,6 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
vol.Required(CONF_PASSWORD): selector.TextSelector(
|
||||
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
|
||||
),
|
||||
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -69,19 +68,34 @@ class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
self.hass,
|
||||
cookie_jar=CookieJar(quote_cookie=False),
|
||||
),
|
||||
account_number=user_input[CONF_ACCOUNT_NUMBER],
|
||||
account_number=user_input.get(CONF_ACCOUNT_NUMBER),
|
||||
)
|
||||
)
|
||||
if isinstance(validation_response, BaseAuth):
|
||||
await self.async_set_unique_id(user_input[CONF_ACCOUNT_NUMBER])
|
||||
account_number = (
|
||||
user_input.get(CONF_ACCOUNT_NUMBER)
|
||||
or validation_response.account_number
|
||||
)
|
||||
await self.async_set_unique_id(account_number)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=user_input[CONF_ACCOUNT_NUMBER],
|
||||
title=account_number,
|
||||
data={
|
||||
**user_input,
|
||||
CONF_ACCESS_TOKEN: validation_response.refresh_token,
|
||||
CONF_ACCOUNT_NUMBER: account_number,
|
||||
},
|
||||
)
|
||||
if validation_response == "smart_meter_unavailable":
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
|
||||
}
|
||||
),
|
||||
errors={"base": validation_response},
|
||||
)
|
||||
errors["base"] = validation_response
|
||||
|
||||
return self.async_show_form(
|
||||
|
||||
@@ -5,7 +5,6 @@
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/anglian_water",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["pyanglianwater"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyanglianwater==2.1.0"]
|
||||
}
|
||||
|
||||
@@ -421,6 +421,8 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
)
|
||||
if short_form.search(model_alias):
|
||||
model_alias += "-0"
|
||||
if model_alias.endswith(("haiku", "opus", "sonnet")):
|
||||
model_alias += "-latest"
|
||||
model_options.append(
|
||||
SelectOptionDict(
|
||||
label=model_info.display_name,
|
||||
|
||||
@@ -583,7 +583,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
name=subentry.title,
|
||||
manufacturer="Anthropic",
|
||||
model="Claude",
|
||||
model=subentry.data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL]),
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/anthropic",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["anthropic==0.73.0"]
|
||||
"requirements": ["anthropic==0.75.0"]
|
||||
}
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["aioasuswrt", "asusrouter", "asyncssh"],
|
||||
"requirements": ["aioasuswrt==1.5.2", "asusrouter==1.21.3"]
|
||||
"requirements": ["aioasuswrt==1.5.1", "asusrouter==1.21.0"]
|
||||
}
|
||||
|
||||
@@ -17,8 +17,12 @@ from homeassistant.components.media_player import (
|
||||
class BangOlufsenSource:
|
||||
"""Class used for associating device source ids with friendly names. May not include all sources."""
|
||||
|
||||
DEEZER: Final[Source] = Source(name="Deezer", id="deezer")
|
||||
LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn")
|
||||
NET_RADIO: Final[Source] = Source(name="B&O Radio", id="netRadio")
|
||||
SPDIF: Final[Source] = Source(name="Optical", id="spdif")
|
||||
TIDAL: Final[Source] = Source(name="Tidal", id="tidal")
|
||||
UNKNOWN: Final[Source] = Source(name="Unknown Source", id="unknown")
|
||||
URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer")
|
||||
|
||||
|
||||
@@ -78,6 +82,16 @@ class BangOlufsenModel(StrEnum):
|
||||
BEOREMOTE_ONE = "Beoremote One"
|
||||
|
||||
|
||||
class BangOlufsenAttribute(StrEnum):
|
||||
"""Enum for extra_state_attribute keys."""
|
||||
|
||||
BEOLINK = "beolink"
|
||||
BEOLINK_PEERS = "peers"
|
||||
BEOLINK_SELF = "self"
|
||||
BEOLINK_LEADER = "leader"
|
||||
BEOLINK_LISTENERS = "listeners"
|
||||
|
||||
|
||||
# Physical "buttons" on devices
|
||||
class BangOlufsenButtons(StrEnum):
|
||||
"""Enum for device buttons."""
|
||||
|
||||
@@ -82,6 +82,7 @@ from .const import (
|
||||
FALLBACK_SOURCES,
|
||||
MANUFACTURER,
|
||||
VALID_MEDIA_TYPES,
|
||||
BangOlufsenAttribute,
|
||||
BangOlufsenMediaType,
|
||||
BangOlufsenSource,
|
||||
WebsocketNotification,
|
||||
@@ -224,7 +225,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
# Beolink compatible sources
|
||||
self._beolink_sources: dict[str, bool] = {}
|
||||
self._remote_leader: BeolinkLeader | None = None
|
||||
# Extra state attributes for showing Beolink: peer(s), listener(s), leader and self
|
||||
# Extra state attributes:
|
||||
# Beolink: peer(s), listener(s), leader and self
|
||||
self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {}
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
@@ -436,7 +438,10 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
await self._async_update_beolink()
|
||||
|
||||
async def _async_update_beolink(self) -> None:
|
||||
"""Update the current Beolink leader, listeners, peers and self."""
|
||||
"""Update the current Beolink leader, listeners, peers and self.
|
||||
|
||||
Updates Home Assistant state.
|
||||
"""
|
||||
|
||||
self._beolink_attributes = {}
|
||||
|
||||
@@ -445,18 +450,24 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
|
||||
# Add Beolink self
|
||||
self._beolink_attributes = {
|
||||
"beolink": {"self": {self.device_entry.name: self._beolink_jid}}
|
||||
BangOlufsenAttribute.BEOLINK: {
|
||||
BangOlufsenAttribute.BEOLINK_SELF: {
|
||||
self.device_entry.name: self._beolink_jid
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Add Beolink peers
|
||||
peers = await self._client.get_beolink_peers()
|
||||
|
||||
if len(peers) > 0:
|
||||
self._beolink_attributes["beolink"]["peers"] = {}
|
||||
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
|
||||
BangOlufsenAttribute.BEOLINK_PEERS
|
||||
] = {}
|
||||
for peer in peers:
|
||||
self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = (
|
||||
peer.jid
|
||||
)
|
||||
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
|
||||
BangOlufsenAttribute.BEOLINK_PEERS
|
||||
][peer.friendly_name] = peer.jid
|
||||
|
||||
# Add Beolink listeners / leader
|
||||
self._remote_leader = self._playback_metadata.remote_leader
|
||||
@@ -477,7 +488,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
# Add self
|
||||
group_members.append(self.entity_id)
|
||||
|
||||
self._beolink_attributes["beolink"]["leader"] = {
|
||||
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
|
||||
BangOlufsenAttribute.BEOLINK_LEADER
|
||||
] = {
|
||||
self._remote_leader.friendly_name: self._remote_leader.jid,
|
||||
}
|
||||
|
||||
@@ -514,9 +527,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
beolink_listener.jid
|
||||
)
|
||||
break
|
||||
self._beolink_attributes["beolink"]["listeners"] = (
|
||||
beolink_listeners_attribute
|
||||
)
|
||||
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
|
||||
BangOlufsenAttribute.BEOLINK_LISTENERS
|
||||
] = beolink_listeners_attribute
|
||||
|
||||
self._attr_group_members = group_members
|
||||
|
||||
@@ -615,11 +628,18 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
return None
|
||||
|
||||
@property
|
||||
def media_content_type(self) -> str:
|
||||
def media_content_type(self) -> MediaType | str | None:
|
||||
"""Return the current media type."""
|
||||
# Hard to determine content type
|
||||
if self._source_change.id == BangOlufsenSource.URI_STREAMER.id:
|
||||
return MediaType.URL
|
||||
content_type = {
|
||||
BangOlufsenSource.URI_STREAMER.id: MediaType.URL,
|
||||
BangOlufsenSource.DEEZER.id: BangOlufsenMediaType.DEEZER,
|
||||
BangOlufsenSource.TIDAL.id: BangOlufsenMediaType.TIDAL,
|
||||
BangOlufsenSource.NET_RADIO.id: BangOlufsenMediaType.RADIO,
|
||||
}
|
||||
# Hard to determine content type.
|
||||
if self._source_change.id in content_type:
|
||||
return content_type[self._source_change.id]
|
||||
|
||||
return MediaType.MUSIC
|
||||
|
||||
@property
|
||||
@@ -632,6 +652,11 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
"""Return the current playback progress."""
|
||||
return self._playback_progress.progress
|
||||
|
||||
@property
|
||||
def media_content_id(self) -> str | None:
|
||||
"""Return internal ID of Deezer, Tidal and radio stations."""
|
||||
return self._playback_metadata.source_internal_id
|
||||
|
||||
@property
|
||||
def media_image_url(self) -> str | None:
|
||||
"""Return URL of the currently playing music."""
|
||||
|
||||
@@ -64,12 +64,6 @@ async def async_migrate_entry(hass: HomeAssistant, entry: BlinkConfigEntry) -> b
|
||||
if entry.version == 2:
|
||||
await _reauth_flow_wrapper(hass, entry, data)
|
||||
return False
|
||||
if entry.version == 3:
|
||||
# Migrate device_id to hardware_id for blinkpy 0.25.x OAuth2 compatibility
|
||||
if "device_id" in data:
|
||||
data["hardware_id"] = data.pop("device_id")
|
||||
hass.config_entries.async_update_entry(entry, data=data, version=4)
|
||||
return True
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ from homeassistant.core import callback
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN, HARDWARE_ID
|
||||
from .const import DEVICE_ID, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -43,7 +43,7 @@ async def _send_blink_2fa_pin(blink: Blink, pin: str | None) -> bool:
|
||||
class BlinkConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a Blink config flow."""
|
||||
|
||||
VERSION = 4
|
||||
VERSION = 3
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the blink flow."""
|
||||
@@ -53,7 +53,7 @@ class BlinkConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
async def _handle_user_input(self, user_input: dict[str, Any]):
|
||||
"""Handle user input."""
|
||||
self.auth = Auth(
|
||||
{**user_input, "hardware_id": HARDWARE_ID},
|
||||
{**user_input, "device_id": DEVICE_ID},
|
||||
no_prompt=True,
|
||||
session=async_get_clientsession(self.hass),
|
||||
)
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from homeassistant.const import Platform
|
||||
|
||||
DOMAIN = "blink"
|
||||
HARDWARE_ID = "Home Assistant"
|
||||
DEVICE_ID = "Home Assistant"
|
||||
|
||||
CONF_MIGRATE = "migrate"
|
||||
CONF_CAMERA = "camera"
|
||||
|
||||
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/blink",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["blinkpy"],
|
||||
"requirements": ["blinkpy==0.25.1"]
|
||||
"requirements": ["blinkpy==0.24.1"]
|
||||
}
|
||||
|
||||
@@ -21,6 +21,6 @@
|
||||
"bluetooth-auto-recovery==1.5.3",
|
||||
"bluetooth-data-tools==1.28.4",
|
||||
"dbus-fast==3.1.2",
|
||||
"habluetooth==5.7.0"
|
||||
"habluetooth==5.8.0"
|
||||
]
|
||||
}
|
||||
|
||||
@@ -8,6 +8,10 @@ from typing import Any
|
||||
from pycoolmasternet_async import SWING_MODES
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
FAN_AUTO,
|
||||
FAN_HIGH,
|
||||
FAN_LOW,
|
||||
FAN_MEDIUM,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
@@ -31,7 +35,16 @@ CM_TO_HA_STATE = {
|
||||
|
||||
HA_STATE_TO_CM = {value: key for key, value in CM_TO_HA_STATE.items()}
|
||||
|
||||
FAN_MODES = ["low", "med", "high", "auto"]
|
||||
CM_TO_HA_FAN = {
|
||||
"low": FAN_LOW,
|
||||
"med": FAN_MEDIUM,
|
||||
"high": FAN_HIGH,
|
||||
"auto": FAN_AUTO,
|
||||
}
|
||||
|
||||
HA_FAN_TO_CM = {value: key for key, value in CM_TO_HA_FAN.items()}
|
||||
|
||||
FAN_MODES = list(CM_TO_HA_FAN.values())
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -111,7 +124,7 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
@property
|
||||
def fan_mode(self):
|
||||
"""Return the fan setting."""
|
||||
return self._unit.fan_speed
|
||||
return CM_TO_HA_FAN[self._unit.fan_speed]
|
||||
|
||||
@property
|
||||
def fan_modes(self):
|
||||
@@ -138,7 +151,7 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
async def async_set_fan_mode(self, fan_mode: str) -> None:
|
||||
"""Set new fan mode."""
|
||||
_LOGGER.debug("Setting fan mode of %s to %s", self.unique_id, fan_mode)
|
||||
self._unit = await self._unit.set_fan_speed(fan_mode)
|
||||
self._unit = await self._unit.set_fan_speed(HA_FAN_TO_CM[fan_mode])
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_swing_mode(self, swing_mode: str) -> None:
|
||||
|
||||
@@ -102,12 +102,6 @@ class ConfiguredDoorBird:
|
||||
"""Get token for device."""
|
||||
return self._token
|
||||
|
||||
def _get_hass_url(self) -> str:
|
||||
"""Get the Home Assistant URL for this device."""
|
||||
if custom_url := self.custom_url:
|
||||
return custom_url
|
||||
return get_url(self._hass, prefer_external=False)
|
||||
|
||||
async def async_register_events(self) -> None:
|
||||
"""Register events on device."""
|
||||
if not self.door_station_events:
|
||||
@@ -152,7 +146,13 @@ class ConfiguredDoorBird:
|
||||
|
||||
async def _async_register_events(self) -> dict[str, Any]:
|
||||
"""Register events on device."""
|
||||
hass_url = self._get_hass_url()
|
||||
# Override url if another is specified in the configuration
|
||||
if custom_url := self.custom_url:
|
||||
hass_url = custom_url
|
||||
else:
|
||||
# Get the URL of this server
|
||||
hass_url = get_url(self._hass, prefer_external=False)
|
||||
|
||||
http_fav = await self._async_get_http_favorites()
|
||||
if any(
|
||||
# Note that a list comp is used here to ensure all
|
||||
@@ -191,14 +191,10 @@ class ConfiguredDoorBird:
|
||||
self._get_event_name(event): event_type
|
||||
for event, event_type in DEFAULT_EVENT_TYPES
|
||||
}
|
||||
hass_url = self._get_hass_url()
|
||||
for identifier, data in http_fav.items():
|
||||
title: str | None = data.get("title")
|
||||
if not title or not title.startswith("Home Assistant"):
|
||||
continue
|
||||
value: str | None = data.get("value")
|
||||
if not value or not value.startswith(hass_url):
|
||||
continue # Not our favorite - different HA instance or stale
|
||||
event = title.partition("(")[2].strip(")")
|
||||
if input_type := favorite_input_type.get(identifier):
|
||||
events.append(DoorbirdEvent(event, input_type))
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==17.0.0"]
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==16.4.0"]
|
||||
}
|
||||
|
||||
@@ -17,7 +17,7 @@ DEFAULT_TTS_MODEL = "eleven_multilingual_v2"
|
||||
DEFAULT_STABILITY = 0.5
|
||||
DEFAULT_SIMILARITY = 0.75
|
||||
DEFAULT_STT_AUTO_LANGUAGE = False
|
||||
DEFAULT_STT_MODEL = "scribe_v1"
|
||||
DEFAULT_STT_MODEL = "scribe_v2"
|
||||
DEFAULT_STYLE = 0
|
||||
DEFAULT_USE_SPEAKER_BOOST = True
|
||||
|
||||
@@ -129,4 +129,5 @@ STT_LANGUAGES = [
|
||||
STT_MODELS = {
|
||||
"scribe_v1": "Scribe v1",
|
||||
"scribe_v1_experimental": "Scribe v1 Experimental",
|
||||
"scribe_v2": "Scribe v2 Realtime",
|
||||
}
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==42.9.0",
|
||||
"aioesphomeapi==42.10.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.4.0"
|
||||
],
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["evohome", "evohomeasync", "evohomeasync2"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["evohome-async==1.0.6"]
|
||||
"requirements": ["evohome-async==1.0.5"]
|
||||
}
|
||||
|
||||
@@ -1,22 +1,30 @@
|
||||
"""API for fitbit bound to Home Assistant OAuth."""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Awaitable, Callable
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from fitbit import Fitbit
|
||||
from fitbit.exceptions import HTTPException, HTTPUnauthorized
|
||||
from fitbit_web_api import ApiClient, Configuration, DevicesApi
|
||||
from fitbit_web_api.exceptions import (
|
||||
ApiException,
|
||||
OpenApiException,
|
||||
UnauthorizedException,
|
||||
)
|
||||
from fitbit_web_api.models.device import Device
|
||||
from requests.exceptions import ConnectionError as RequestsConnectionError
|
||||
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from .const import FitbitUnitSystem
|
||||
from .exceptions import FitbitApiException, FitbitAuthException
|
||||
from .model import FitbitDevice, FitbitProfile
|
||||
from .model import FitbitProfile
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -58,6 +66,14 @@ class FitbitApi(ABC):
|
||||
expires_at=float(token[CONF_EXPIRES_AT]),
|
||||
)
|
||||
|
||||
async def _async_get_fitbit_web_api(self) -> ApiClient:
|
||||
"""Create and return an ApiClient configured with the current access token."""
|
||||
token = await self.async_get_access_token()
|
||||
configuration = Configuration()
|
||||
configuration.pool_manager = async_get_clientsession(self._hass)
|
||||
configuration.access_token = token[CONF_ACCESS_TOKEN]
|
||||
return ApiClient(configuration)
|
||||
|
||||
async def async_get_user_profile(self) -> FitbitProfile:
|
||||
"""Return the user profile from the API."""
|
||||
if self._profile is None:
|
||||
@@ -94,21 +110,13 @@ class FitbitApi(ABC):
|
||||
return FitbitUnitSystem.METRIC
|
||||
return FitbitUnitSystem.EN_US
|
||||
|
||||
async def async_get_devices(self) -> list[FitbitDevice]:
|
||||
"""Return available devices."""
|
||||
client = await self._async_get_client()
|
||||
devices: list[dict[str, str]] = await self._run(client.get_devices)
|
||||
async def async_get_devices(self) -> list[Device]:
|
||||
"""Return available devices using fitbit-web-api."""
|
||||
client = await self._async_get_fitbit_web_api()
|
||||
devices_api = DevicesApi(client)
|
||||
devices: list[Device] = await self._run_async(devices_api.get_devices)
|
||||
_LOGGER.debug("get_devices=%s", devices)
|
||||
return [
|
||||
FitbitDevice(
|
||||
id=device["id"],
|
||||
device_version=device["deviceVersion"],
|
||||
battery_level=int(device["batteryLevel"]),
|
||||
battery=device["battery"],
|
||||
type=device["type"],
|
||||
)
|
||||
for device in devices
|
||||
]
|
||||
return devices
|
||||
|
||||
async def async_get_latest_time_series(self, resource_type: str) -> dict[str, Any]:
|
||||
"""Return the most recent value from the time series for the specified resource type."""
|
||||
@@ -140,6 +148,20 @@ class FitbitApi(ABC):
|
||||
_LOGGER.debug("Error from fitbit API: %s", err)
|
||||
raise FitbitApiException("Error from fitbit API") from err
|
||||
|
||||
async def _run_async[_T](self, func: Callable[[], Awaitable[_T]]) -> _T:
|
||||
"""Run client command."""
|
||||
try:
|
||||
return await func()
|
||||
except UnauthorizedException as err:
|
||||
_LOGGER.debug("Unauthorized error from fitbit API: %s", err)
|
||||
raise FitbitAuthException("Authentication error from fitbit API") from err
|
||||
except ApiException as err:
|
||||
_LOGGER.debug("Error from fitbit API: %s", err)
|
||||
raise FitbitApiException("Error from fitbit API") from err
|
||||
except OpenApiException as err:
|
||||
_LOGGER.debug("Error communicating with fitbit API: %s", err)
|
||||
raise FitbitApiException("Communication error from fitbit API") from err
|
||||
|
||||
|
||||
class OAuthFitbitApi(FitbitApi):
|
||||
"""Provide fitbit authentication tied to an OAuth2 based config entry."""
|
||||
|
||||
@@ -6,6 +6,8 @@ import datetime
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
from fitbit_web_api.models.device import Device
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
@@ -13,7 +15,6 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
|
||||
from .api import FitbitApi
|
||||
from .exceptions import FitbitApiException, FitbitAuthException
|
||||
from .model import FitbitDevice
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -23,7 +24,7 @@ TIMEOUT = 10
|
||||
type FitbitConfigEntry = ConfigEntry[FitbitData]
|
||||
|
||||
|
||||
class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
|
||||
class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, Device]]):
|
||||
"""Coordinator for fetching fitbit devices from the API."""
|
||||
|
||||
config_entry: FitbitConfigEntry
|
||||
@@ -41,7 +42,7 @@ class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
|
||||
)
|
||||
self._api = api
|
||||
|
||||
async def _async_update_data(self) -> dict[str, FitbitDevice]:
|
||||
async def _async_update_data(self) -> dict[str, Device]:
|
||||
"""Fetch data from API endpoint."""
|
||||
async with asyncio.timeout(TIMEOUT):
|
||||
try:
|
||||
@@ -50,7 +51,7 @@ class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
|
||||
raise ConfigEntryAuthFailed(err) from err
|
||||
except FitbitApiException as err:
|
||||
raise UpdateFailed(err) from err
|
||||
return {device.id: device for device in devices}
|
||||
return {device.id: device for device in devices if device.id is not None}
|
||||
|
||||
|
||||
@dataclass
|
||||
|
||||
@@ -6,6 +6,6 @@
|
||||
"dependencies": ["application_credentials", "http"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/fitbit",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["fitbit"],
|
||||
"requirements": ["fitbit==0.3.1"]
|
||||
"loggers": ["fitbit", "fitbit_web_api"],
|
||||
"requirements": ["fitbit==0.3.1", "fitbit-web-api==2.13.5"]
|
||||
}
|
||||
|
||||
@@ -21,26 +21,6 @@ class FitbitProfile:
|
||||
"""The locale defined in the user's Fitbit account settings."""
|
||||
|
||||
|
||||
@dataclass
|
||||
class FitbitDevice:
|
||||
"""Device from the Fitbit API response."""
|
||||
|
||||
id: str
|
||||
"""The device ID."""
|
||||
|
||||
device_version: str
|
||||
"""The product name of the device."""
|
||||
|
||||
battery_level: int
|
||||
"""The battery level as a percentage."""
|
||||
|
||||
battery: str
|
||||
"""Returns the battery level of the device."""
|
||||
|
||||
type: str
|
||||
"""The type of the device such as TRACKER or SCALE."""
|
||||
|
||||
|
||||
@dataclass
|
||||
class FitbitConfig:
|
||||
"""Information from the fitbit ConfigEntry data."""
|
||||
|
||||
@@ -8,6 +8,8 @@ import datetime
|
||||
import logging
|
||||
from typing import Any, Final, cast
|
||||
|
||||
from fitbit_web_api.models.device import Device
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
@@ -32,7 +34,7 @@ from .api import FitbitApi
|
||||
from .const import ATTRIBUTION, BATTERY_LEVELS, DOMAIN, FitbitScope, FitbitUnitSystem
|
||||
from .coordinator import FitbitConfigEntry, FitbitDeviceCoordinator
|
||||
from .exceptions import FitbitApiException, FitbitAuthException
|
||||
from .model import FitbitDevice, config_from_entry_data
|
||||
from .model import config_from_entry_data
|
||||
|
||||
_LOGGER: Final = logging.getLogger(__name__)
|
||||
|
||||
@@ -657,7 +659,7 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
|
||||
coordinator: FitbitDeviceCoordinator,
|
||||
user_profile_id: str,
|
||||
description: FitbitSensorEntityDescription,
|
||||
device: FitbitDevice,
|
||||
device: Device,
|
||||
enable_default_override: bool,
|
||||
) -> None:
|
||||
"""Initialize the Fitbit sensor."""
|
||||
@@ -677,7 +679,9 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
|
||||
@property
|
||||
def icon(self) -> str | None:
|
||||
"""Icon to use in the frontend, if any."""
|
||||
if battery_level := BATTERY_LEVELS.get(self.device.battery):
|
||||
if self.device.battery is not None and (
|
||||
battery_level := BATTERY_LEVELS.get(self.device.battery)
|
||||
):
|
||||
return icon_for_battery_level(battery_level=battery_level)
|
||||
return self.entity_description.icon
|
||||
|
||||
@@ -697,7 +701,7 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self.device = self.coordinator.data[self.device.id]
|
||||
self.device = self.coordinator.data[cast(str, self.device.id)]
|
||||
self._attr_native_value = self.device.battery
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -715,7 +719,7 @@ class FitbitBatteryLevelSensor(
|
||||
coordinator: FitbitDeviceCoordinator,
|
||||
user_profile_id: str,
|
||||
description: FitbitSensorEntityDescription,
|
||||
device: FitbitDevice,
|
||||
device: Device,
|
||||
) -> None:
|
||||
"""Initialize the Fitbit sensor."""
|
||||
super().__init__(coordinator)
|
||||
@@ -736,6 +740,6 @@ class FitbitBatteryLevelSensor(
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self.device = self.coordinator.data[self.device.id]
|
||||
self.device = self.coordinator.data[cast(str, self.device.id)]
|
||||
self._attr_native_value = self.device.battery_level
|
||||
self.async_write_ha_state()
|
||||
|
||||
53
homeassistant/components/fressnapf_tracker/__init__.py
Normal file
53
homeassistant/components/fressnapf_tracker/__init__.py
Normal file
@@ -0,0 +1,53 @@
|
||||
"""The Fressnapf Tracker integration."""
|
||||
|
||||
from fressnapftracker import AuthClient
|
||||
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
|
||||
from .const import CONF_USER_ID
|
||||
from .coordinator import (
|
||||
FressnapfTrackerConfigEntry,
|
||||
FressnapfTrackerDataUpdateCoordinator,
|
||||
)
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.DEVICE_TRACKER,
|
||||
Platform.SENSOR,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: FressnapfTrackerConfigEntry
|
||||
) -> bool:
|
||||
"""Set up Fressnapf Tracker from a config entry."""
|
||||
auth_client = AuthClient(client=get_async_client(hass))
|
||||
devices = await auth_client.get_devices(
|
||||
user_id=entry.data[CONF_USER_ID],
|
||||
user_access_token=entry.data[CONF_ACCESS_TOKEN],
|
||||
)
|
||||
|
||||
coordinators: list[FressnapfTrackerDataUpdateCoordinator] = []
|
||||
for device in devices:
|
||||
coordinator = FressnapfTrackerDataUpdateCoordinator(
|
||||
hass,
|
||||
entry,
|
||||
device,
|
||||
)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
coordinators.append(coordinator)
|
||||
|
||||
entry.runtime_data = coordinators
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: FressnapfTrackerConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
69
homeassistant/components/fressnapf_tracker/binary_sensor.py
Normal file
69
homeassistant/components/fressnapf_tracker/binary_sensor.py
Normal file
@@ -0,0 +1,69 @@
|
||||
"""Binary Sensor platform for fressnapf_tracker."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from fressnapftracker import Tracker
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
BinarySensorEntity,
|
||||
BinarySensorEntityDescription,
|
||||
)
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import FressnapfTrackerConfigEntry
|
||||
from .entity import FressnapfTrackerEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class FressnapfTrackerBinarySensorDescription(BinarySensorEntityDescription):
|
||||
"""Class describing Fressnapf Tracker binary_sensor entities."""
|
||||
|
||||
value_fn: Callable[[Tracker], bool]
|
||||
|
||||
|
||||
BINARY_SENSOR_ENTITY_DESCRIPTIONS: tuple[
|
||||
FressnapfTrackerBinarySensorDescription, ...
|
||||
] = (
|
||||
FressnapfTrackerBinarySensorDescription(
|
||||
key="charging",
|
||||
device_class=BinarySensorDeviceClass.BATTERY_CHARGING,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.charging,
|
||||
),
|
||||
FressnapfTrackerBinarySensorDescription(
|
||||
translation_key="deep_sleep",
|
||||
key="deep_sleep_value",
|
||||
device_class=BinarySensorDeviceClass.POWER,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: bool(data.deep_sleep_value),
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: FressnapfTrackerConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Fressnapf Tracker binary_sensors."""
|
||||
|
||||
async_add_entities(
|
||||
FressnapfTrackerBinarySensor(coordinator, sensor_description)
|
||||
for sensor_description in BINARY_SENSOR_ENTITY_DESCRIPTIONS
|
||||
for coordinator in entry.runtime_data
|
||||
)
|
||||
|
||||
|
||||
class FressnapfTrackerBinarySensor(FressnapfTrackerEntity, BinarySensorEntity):
|
||||
"""Fressnapf Tracker binary_sensor for general information."""
|
||||
|
||||
entity_description: FressnapfTrackerBinarySensorDescription
|
||||
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return True if the binary sensor is on."""
|
||||
return self.entity_description.value_fn(self.coordinator.data)
|
||||
193
homeassistant/components/fressnapf_tracker/config_flow.py
Normal file
193
homeassistant/components/fressnapf_tracker/config_flow.py
Normal file
@@ -0,0 +1,193 @@
|
||||
"""Config flow for the Fressnapf Tracker integration."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from fressnapftracker import (
|
||||
AuthClient,
|
||||
FressnapfTrackerInvalidPhoneNumberError,
|
||||
FressnapfTrackerInvalidTokenError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
|
||||
from .const import CONF_PHONE_NUMBER, CONF_SMS_CODE, CONF_USER_ID, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PHONE_NUMBER): str,
|
||||
}
|
||||
)
|
||||
STEP_SMS_CODE_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_SMS_CODE): int,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class FressnapfTrackerConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Fressnapf Tracker."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Init Config Flow."""
|
||||
self._context: dict[str, Any] = {}
|
||||
self._auth_client: AuthClient | None = None
|
||||
|
||||
@property
|
||||
def auth_client(self) -> AuthClient:
|
||||
"""Return the auth client, creating it if needed."""
|
||||
if self._auth_client is None:
|
||||
self._auth_client = AuthClient(client=get_async_client(self.hass))
|
||||
return self._auth_client
|
||||
|
||||
async def _async_request_sms_code(
|
||||
self, phone_number: str
|
||||
) -> tuple[dict[str, str], bool]:
|
||||
"""Request SMS code and return errors dict and success flag."""
|
||||
errors: dict[str, str] = {}
|
||||
try:
|
||||
response = await self.auth_client.request_sms_code(
|
||||
phone_number=phone_number
|
||||
)
|
||||
except FressnapfTrackerInvalidPhoneNumberError:
|
||||
errors["base"] = "invalid_phone_number"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
_LOGGER.debug("SMS code request response: %s", response)
|
||||
self._context[CONF_USER_ID] = response.id
|
||||
self._context[CONF_PHONE_NUMBER] = phone_number
|
||||
return errors, True
|
||||
return errors, False
|
||||
|
||||
async def _async_verify_sms_code(
|
||||
self, sms_code: int
|
||||
) -> tuple[dict[str, str], str | None]:
|
||||
"""Verify SMS code and return errors and access_token."""
|
||||
errors: dict[str, str] = {}
|
||||
try:
|
||||
verification_response = await self.auth_client.verify_phone_number(
|
||||
user_id=self._context[CONF_USER_ID],
|
||||
sms_code=sms_code,
|
||||
)
|
||||
except FressnapfTrackerInvalidTokenError:
|
||||
errors["base"] = "invalid_sms_code"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception during SMS code verification")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
_LOGGER.debug(
|
||||
"Phone number verification response: %s", verification_response
|
||||
)
|
||||
return errors, verification_response.user_token.access_token
|
||||
return errors, None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match(
|
||||
{CONF_PHONE_NUMBER: user_input[CONF_PHONE_NUMBER]}
|
||||
)
|
||||
errors, success = await self._async_request_sms_code(
|
||||
user_input[CONF_PHONE_NUMBER]
|
||||
)
|
||||
if success:
|
||||
await self.async_set_unique_id(str(self._context[CONF_USER_ID]))
|
||||
self._abort_if_unique_id_configured()
|
||||
return await self.async_step_sms_code()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_sms_code(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the SMS code step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
errors, access_token = await self._async_verify_sms_code(
|
||||
user_input[CONF_SMS_CODE]
|
||||
)
|
||||
if access_token:
|
||||
return self.async_create_entry(
|
||||
title=self._context[CONF_PHONE_NUMBER],
|
||||
data={
|
||||
CONF_PHONE_NUMBER: self._context[CONF_PHONE_NUMBER],
|
||||
CONF_USER_ID: self._context[CONF_USER_ID],
|
||||
CONF_ACCESS_TOKEN: access_token,
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="sms_code",
|
||||
data_schema=STEP_SMS_CODE_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the integration."""
|
||||
errors: dict[str, str] = {}
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
|
||||
if user_input is not None:
|
||||
errors, success = await self._async_request_sms_code(
|
||||
user_input[CONF_PHONE_NUMBER]
|
||||
)
|
||||
if success:
|
||||
if reconfigure_entry.data[CONF_USER_ID] != self._context[CONF_USER_ID]:
|
||||
errors["base"] = "account_change_not_allowed"
|
||||
else:
|
||||
return await self.async_step_reconfigure_sms_code()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_PHONE_NUMBER,
|
||||
default=reconfigure_entry.data.get(CONF_PHONE_NUMBER),
|
||||
): str,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure_sms_code(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the SMS code step during reconfiguration."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
errors, access_token = await self._async_verify_sms_code(
|
||||
user_input[CONF_SMS_CODE]
|
||||
)
|
||||
if access_token:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reconfigure_entry(),
|
||||
data={
|
||||
CONF_PHONE_NUMBER: self._context[CONF_PHONE_NUMBER],
|
||||
CONF_USER_ID: self._context[CONF_USER_ID],
|
||||
CONF_ACCESS_TOKEN: access_token,
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure_sms_code",
|
||||
data_schema=STEP_SMS_CODE_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
)
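Taken together, the steps above drive a phone-number prompt followed by an SMS-code prompt. A test-style sketch of that sequence using Home Assistant's standard flow test helpers; the patch targets and response shapes mirror what the flow reads (response.id, user_token.access_token), but the fixture layout and assertions in the real test suite may differ:

# Illustrative sketch, not the integration's actual tests.
from unittest.mock import AsyncMock, MagicMock, patch

from homeassistant.config_entries import SOURCE_USER
from homeassistant.core import HomeAssistant


async def test_user_and_sms_code_steps(hass: HomeAssistant) -> None:
    """Walk the user and sms_code steps through to entry creation."""
    sms_response = MagicMock(id=1234)
    verify_response = MagicMock()
    verify_response.user_token.access_token = "test-token"

    with (
        patch(
            "homeassistant.components.fressnapf_tracker.config_flow.AuthClient"
        ) as auth_client_cls,
        patch(
            "homeassistant.components.fressnapf_tracker.async_setup_entry",
            return_value=True,
        ),
    ):
        auth_client = auth_client_cls.return_value
        auth_client.request_sms_code = AsyncMock(return_value=sms_response)
        auth_client.verify_phone_number = AsyncMock(return_value=verify_response)

        result = await hass.config_entries.flow.async_init(
            "fressnapf_tracker", context={"source": SOURCE_USER}
        )
        assert result["step_id"] == "user"

        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], {"phone_number": "+4917612345678"}
        )
        assert result["step_id"] == "sms_code"

        result = await hass.config_entries.flow.async_configure(
            result["flow_id"], {"sms_code": 123456}
        )
        assert result["type"] == "create_entry"
        assert result["data"]["access_token"] == "test-token"
        assert result["data"]["user_id"] == 1234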
homeassistant/components/fressnapf_tracker/const.py (new file, 6 lines)
@@ -0,0 +1,6 @@
"""Constants for the Fressnapf Tracker integration."""

DOMAIN = "fressnapf_tracker"
CONF_PHONE_NUMBER = "phone_number"
CONF_SMS_CODE = "sms_code"
CONF_USER_ID = "user_id"
homeassistant/components/fressnapf_tracker/coordinator.py (new file, 50 lines)
@@ -0,0 +1,50 @@
"""Data update coordinator for Fressnapf Tracker integration."""

from datetime import timedelta
import logging

from fressnapftracker import ApiClient, Device, FressnapfTrackerError, Tracker

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.httpx_client import get_async_client
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

type FressnapfTrackerConfigEntry = ConfigEntry[
    list[FressnapfTrackerDataUpdateCoordinator]
]


class FressnapfTrackerDataUpdateCoordinator(DataUpdateCoordinator[Tracker]):
    """Class to manage fetching data from the API."""

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: FressnapfTrackerConfigEntry,
        device: Device,
    ) -> None:
        """Initialize."""
        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            update_interval=timedelta(minutes=15),
            config_entry=config_entry,
        )
        self.device = device
        self.client = ApiClient(
            serial_number=device.serialnumber,
            device_token=device.token,
            client=get_async_client(hass),
        )

    async def _async_update_data(self) -> Tracker:
        try:
            return await self.client.get_tracker()
        except FressnapfTrackerError as exception:
            raise UpdateFailed(exception) from exception
homeassistant/components/fressnapf_tracker/device_tracker.py (new file, 69 lines)
@@ -0,0 +1,69 @@
"""Device tracker platform for fressnapf_tracker."""
|
||||
|
||||
from homeassistant.components.device_tracker import SourceType
|
||||
from homeassistant.components.device_tracker.config_entry import TrackerEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import FressnapfTrackerConfigEntry, FressnapfTrackerDataUpdateCoordinator
|
||||
from .entity import FressnapfTrackerBaseEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: FressnapfTrackerConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the fressnapf_tracker device_trackers."""
|
||||
async_add_entities(
|
||||
FressnapfTrackerDeviceTracker(coordinator) for coordinator in entry.runtime_data
|
||||
)
|
||||
|
||||
|
||||
class FressnapfTrackerDeviceTracker(FressnapfTrackerBaseEntity, TrackerEntity):
|
||||
"""fressnapf_tracker device tracker."""
|
||||
|
||||
_attr_name = None
|
||||
_attr_translation_key = "pet"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: FressnapfTrackerDataUpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize the device tracker."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = coordinator.device.serialnumber
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return super().available and self.coordinator.data.position is not None
|
||||
|
||||
@property
|
||||
def latitude(self) -> float | None:
|
||||
"""Return latitude value of the device."""
|
||||
if self.coordinator.data.position is not None:
|
||||
return self.coordinator.data.position.lat
|
||||
return None
|
||||
|
||||
@property
|
||||
def longitude(self) -> float | None:
|
||||
"""Return longitude value of the device."""
|
||||
if self.coordinator.data.position is not None:
|
||||
return self.coordinator.data.position.lng
|
||||
return None
|
||||
|
||||
@property
|
||||
def source_type(self) -> SourceType:
|
||||
"""Return the source type, eg gps or router, of the device."""
|
||||
return SourceType.GPS
|
||||
|
||||
@property
|
||||
def location_accuracy(self) -> float:
|
||||
"""Return the location accuracy of the device.
|
||||
|
||||
Value in meters.
|
||||
"""
|
||||
if self.coordinator.data.position is not None:
|
||||
return float(self.coordinator.data.position.accuracy)
|
||||
return 0
|
||||
homeassistant/components/fressnapf_tracker/entity.py (new file, 42 lines)
@@ -0,0 +1,42 @@
"""fressnapf_tracker class."""

from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from . import FressnapfTrackerDataUpdateCoordinator
from .const import DOMAIN


class FressnapfTrackerBaseEntity(
    CoordinatorEntity[FressnapfTrackerDataUpdateCoordinator]
):
    """Base entity for Fressnapf Tracker."""

    _attr_has_entity_name = True

    def __init__(self, coordinator: FressnapfTrackerDataUpdateCoordinator) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        self.id = coordinator.device.serialnumber
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, str(self.id))},
            name=str(self.coordinator.data.name),
            model=str(self.coordinator.data.tracker_settings.generation),
            manufacturer="Fressnapf",
            serial_number=str(self.id),
        )


class FressnapfTrackerEntity(FressnapfTrackerBaseEntity):
    """Entity for fressnapf_tracker."""

    def __init__(
        self,
        coordinator: FressnapfTrackerDataUpdateCoordinator,
        entity_description: EntityDescription,
    ) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._attr_unique_id = f"{self.id}_{entity_description.key}"
homeassistant/components/fressnapf_tracker/icons.json (new file, 9 lines)
@@ -0,0 +1,9 @@
{
  "entity": {
    "device_tracker": {
      "pet": {
        "default": "mdi:paw"
      }
    }
  }
}
homeassistant/components/fressnapf_tracker/manifest.json (new file, 11 lines)
@@ -0,0 +1,11 @@
{
  "domain": "fressnapf_tracker",
  "name": "Fressnapf Tracker",
  "codeowners": ["@eifinger"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/fressnapf_tracker",
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "quality_scale": "bronze",
  "requirements": ["fressnapftracker==0.1.2"]
}
@@ -0,0 +1,66 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: todo
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: todo
|
||||
entity-device-class: todo
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: done
|
||||
repair-issues: todo
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
homeassistant/components/fressnapf_tracker/sensor.py (new file, 63 lines)
@@ -0,0 +1,63 @@
"""Sensor platform for fressnapf_tracker."""
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from fressnapftracker import Tracker
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import PERCENTAGE, EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import FressnapfTrackerConfigEntry
|
||||
from .entity import FressnapfTrackerEntity
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class FressnapfTrackerSensorDescription(SensorEntityDescription):
|
||||
"""Class describing Fressnapf Tracker sensor entities."""
|
||||
|
||||
value_fn: Callable[[Tracker], int]
|
||||
|
||||
|
||||
SENSOR_ENTITY_DESCRIPTIONS: tuple[FressnapfTrackerSensorDescription, ...] = (
|
||||
FressnapfTrackerSensorDescription(
|
||||
key="battery",
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
device_class=SensorDeviceClass.BATTERY,
|
||||
native_unit_of_measurement=PERCENTAGE,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
value_fn=lambda data: data.battery,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: FressnapfTrackerConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Fressnapf Tracker sensors."""
|
||||
|
||||
async_add_entities(
|
||||
FressnapfTrackerSensor(coordinator, sensor_description)
|
||||
for sensor_description in SENSOR_ENTITY_DESCRIPTIONS
|
||||
for coordinator in entry.runtime_data
|
||||
)
|
||||
|
||||
|
||||
class FressnapfTrackerSensor(FressnapfTrackerEntity, SensorEntity):
|
||||
"""fressnapf_tracker sensor for general information."""
|
||||
|
||||
entity_description: FressnapfTrackerSensorDescription
|
||||
|
||||
@property
|
||||
def native_value(self) -> int:
|
||||
"""Return the state of the resources if it has been received yet."""
|
||||
return self.entity_description.value_fn(self.coordinator.data)
|
||||
homeassistant/components/fressnapf_tracker/strings.json (new file, 56 lines)
@@ -0,0 +1,56 @@
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"account_change_not_allowed": "Reconfiguring to a different account is not allowed. Please create a new entry instead.",
|
||||
"invalid_phone_number": "Please enter a valid phone number.",
|
||||
"invalid_sms_code": "The SMS code you entered is invalid.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"phone_number": "[%key:component::fressnapf_tracker::config::step::user::data::phone_number%]"
|
||||
},
|
||||
"data_description": {
|
||||
"phone_number": "[%key:component::fressnapf_tracker::config::step::user::data_description::phone_number%]"
|
||||
},
|
||||
"description": "Re-authenticate with your Fressnapf Tracker account to refresh your credentials."
|
||||
},
|
||||
"reconfigure_sms_code": {
|
||||
"data": {
|
||||
"sms_code": "[%key:component::fressnapf_tracker::config::step::sms_code::data::sms_code%]"
|
||||
},
|
||||
"data_description": {
|
||||
"sms_code": "[%key:component::fressnapf_tracker::config::step::sms_code::data_description::sms_code%]"
|
||||
}
|
||||
},
|
||||
"sms_code": {
|
||||
"data": {
|
||||
"sms_code": "SMS code"
|
||||
},
|
||||
"data_description": {
|
||||
"sms_code": "Enter the SMS code you received on your phone."
|
||||
}
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"phone_number": "Phone number"
|
||||
},
|
||||
"data_description": {
|
||||
"phone_number": "Enter your phone number in international format (e.g., +4917612345678)."
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"binary_sensor": {
|
||||
"deep_sleep": {
|
||||
"name": "Deep sleep"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -23,5 +23,5 @@
|
||||
"winter_mode": {}
|
||||
},
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20251203.2"]
|
||||
"requirements": ["home-assistant-frontend==20251202.0"]
|
||||
}
|
||||
|
||||
@@ -1,9 +1,9 @@
|
||||
{
|
||||
"preview_features": {
|
||||
"winter_mode": {
|
||||
"description": "Adds falling snowflakes on your screen. Get your home ready for winter! ❄️\n\nIf you have animations disabled in your device accessibility settings, this feature will not work.",
|
||||
"disable_confirmation": "Snowflakes will no longer fall on your screen. You can re-enable this at any time in Labs settings.",
|
||||
"enable_confirmation": "Snowflakes will start falling on your screen. You can turn this off at any time in Labs settings.",
|
||||
"description": "Adds falling snowflakes on your screen. Get your home ready for winter! ❄️",
|
||||
"disable_confirmation": "Snowflakes will no longer fall on your screen. You can re-enable this at any time in labs settings.",
|
||||
"enable_confirmation": "Snowflakes will start falling on your screen. You can turn this off at any time in labs settings.",
|
||||
"name": "Winter mode"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["googleapiclient"],
|
||||
"requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==12.1.1"]
|
||||
"requirements": ["gcal-sync==8.0.0", "oauth2client==4.1.3", "ical==11.1.0"]
|
||||
}
|
||||
|
||||
@@ -51,9 +51,9 @@ async def _validate_input(
|
||||
description_placeholders: dict[str, str],
|
||||
) -> bool:
|
||||
try:
|
||||
await api.async_get_current_conditions(
|
||||
await api.async_air_quality(
|
||||
lat=user_input[CONF_LOCATION][CONF_LATITUDE],
|
||||
lon=user_input[CONF_LOCATION][CONF_LONGITUDE],
|
||||
long=user_input[CONF_LOCATION][CONF_LONGITUDE],
|
||||
)
|
||||
except GoogleAirQualityApiError as err:
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
@@ -7,7 +7,7 @@ from typing import Final
|
||||
|
||||
from google_air_quality_api.api import GoogleAirQualityApi
|
||||
from google_air_quality_api.exceptions import GoogleAirQualityApiError
|
||||
from google_air_quality_api.model import AirQualityCurrentConditionsData
|
||||
from google_air_quality_api.model import AirQualityData
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
|
||||
@@ -23,9 +23,7 @@ UPDATE_INTERVAL: Final = timedelta(hours=1)
|
||||
type GoogleAirQualityConfigEntry = ConfigEntry[GoogleAirQualityRuntimeData]
|
||||
|
||||
|
||||
class GoogleAirQualityUpdateCoordinator(
|
||||
DataUpdateCoordinator[AirQualityCurrentConditionsData]
|
||||
):
|
||||
class GoogleAirQualityUpdateCoordinator(DataUpdateCoordinator[AirQualityData]):
|
||||
"""Coordinator for fetching Google AirQuality data."""
|
||||
|
||||
config_entry: GoogleAirQualityConfigEntry
|
||||
@@ -50,10 +48,10 @@ class GoogleAirQualityUpdateCoordinator(
|
||||
self.lat = subentry.data[CONF_LATITUDE]
|
||||
self.long = subentry.data[CONF_LONGITUDE]
|
||||
|
||||
async def _async_update_data(self) -> AirQualityCurrentConditionsData:
|
||||
async def _async_update_data(self) -> AirQualityData:
|
||||
"""Fetch air quality data for this coordinate."""
|
||||
try:
|
||||
return await self.client.async_get_current_conditions(self.lat, self.long)
|
||||
return await self.client.async_air_quality(self.lat, self.long)
|
||||
except GoogleAirQualityApiError as ex:
|
||||
_LOGGER.debug("Cannot fetch air quality data: %s", str(ex))
|
||||
raise UpdateFailed(
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["google_air_quality_api"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["google_air_quality_api==2.0.2"]
|
||||
"requirements": ["google_air_quality_api==1.1.3"]
|
||||
}
|
||||
|
||||
@@ -4,7 +4,7 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
|
||||
from google_air_quality_api.model import AirQualityCurrentConditionsData
|
||||
from google_air_quality_api.model import AirQualityData
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
@@ -33,17 +33,15 @@ PARALLEL_UPDATES = 0
|
||||
class AirQualitySensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes Air Quality sensor entity."""
|
||||
|
||||
exists_fn: Callable[[AirQualityCurrentConditionsData], bool] = lambda _: True
|
||||
options_fn: Callable[[AirQualityCurrentConditionsData], list[str] | None] = (
|
||||
exists_fn: Callable[[AirQualityData], bool] = lambda _: True
|
||||
options_fn: Callable[[AirQualityData], list[str] | None] = lambda _: None
|
||||
value_fn: Callable[[AirQualityData], StateType]
|
||||
native_unit_of_measurement_fn: Callable[[AirQualityData], str | None] = (
|
||||
lambda _: None
|
||||
)
|
||||
value_fn: Callable[[AirQualityCurrentConditionsData], StateType]
|
||||
native_unit_of_measurement_fn: Callable[
|
||||
[AirQualityCurrentConditionsData], str | None
|
||||
] = lambda _: None
|
||||
translation_placeholders_fn: (
|
||||
Callable[[AirQualityCurrentConditionsData], dict[str, str]] | None
|
||||
) = None
|
||||
translation_placeholders_fn: Callable[[AirQualityData], dict[str, str]] | None = (
|
||||
None
|
||||
)
|
||||
|
||||
|
||||
AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
|
||||
|
||||
@@ -59,14 +59,9 @@
|
||||
"user": "Add location"
|
||||
},
|
||||
"step": {
|
||||
"location": {
|
||||
"user": {
|
||||
"data": {
|
||||
"location": "[%key:common::config_flow::data::location%]",
|
||||
"name": "[%key:common::config_flow::data::name%]"
|
||||
},
|
||||
"data_description": {
|
||||
"location": "[%key:component::google_air_quality::config::step::user::data_description::location%]",
|
||||
"name": "[%key:component::google_air_quality::config::step::user::data_description::name%]"
|
||||
"location": "[%key:common::config_flow::data::location%]"
|
||||
},
|
||||
"description": "Select the coordinates for which you want to create an entry.",
|
||||
"title": "Air quality data location"
|
||||
@@ -88,16 +83,16 @@
|
||||
"1b_good_air_quality": "1B - Good air quality",
|
||||
"2_cyan": "2 - Cyan",
|
||||
"2_light_green": "2 - Light green",
|
||||
"2_orange": "4 - Orange",
|
||||
"2_red": "5 - Red",
|
||||
"2_yellow": "3 - Yellow",
|
||||
"2a_acceptable_air_quality": "2A - Acceptable air quality",
|
||||
"2b_acceptable_air_quality": "2B - Acceptable air quality",
|
||||
"3_green": "3 - Green",
|
||||
"3_yellow": "3 - Yellow",
|
||||
"3a_aggravated_air_quality": "3A - Aggravated air quality",
|
||||
"3b_bad_air_quality": "3B - Bad air quality",
|
||||
"4_orange": "4 - Orange",
|
||||
"4_yellow_watch": "4 - Yellow/Watch",
|
||||
"5_orange_alert": "5 - Orange/Alert",
|
||||
"5_red": "5 - Red",
|
||||
"6_red_alert": "6 - Red/Alert+",
|
||||
"10_33": "10-33% of guideline",
|
||||
"33_66": "33-66% of guideline",
|
||||
|
||||
@@ -149,7 +149,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
}
|
||||
),
|
||||
supports_response=SupportsResponse.ONLY,
|
||||
description_placeholders={"example_image_path": "/config/www/image.jpg"},
|
||||
)
|
||||
return True
|
||||
|
||||
|
||||
@@ -23,7 +23,7 @@ CONF_CHAT_MODEL = "chat_model"
|
||||
RECOMMENDED_CHAT_MODEL = "models/gemini-2.5-flash"
|
||||
RECOMMENDED_STT_MODEL = RECOMMENDED_CHAT_MODEL
|
||||
RECOMMENDED_TTS_MODEL = "models/gemini-2.5-flash-preview-tts"
|
||||
RECOMMENDED_IMAGE_MODEL = "models/gemini-2.5-flash-image"
|
||||
RECOMMENDED_IMAGE_MODEL = "models/gemini-2.5-flash-image-preview"
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
RECOMMENDED_TEMPERATURE = 1.0
|
||||
CONF_TOP_P = "top_p"
|
||||
|
||||
@@ -162,7 +162,7 @@
|
||||
"fields": {
|
||||
"filenames": {
|
||||
"description": "Attachments to add to the prompt (images, PDFs, etc)",
|
||||
"example": "{example_image_path}",
|
||||
"example": "/config/www/image.jpg",
|
||||
"name": "Attachment filenames"
|
||||
},
|
||||
"prompt": {
|
||||
|
||||
@@ -159,5 +159,4 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
_async_handle_upload,
|
||||
schema=UPLOAD_SERVICE_SCHEMA,
|
||||
supports_response=SupportsResponse.OPTIONAL,
|
||||
description_placeholders={"example_image_path": "/config/www/image.jpg"},
|
||||
)
|
||||
|
||||
@@ -92,7 +92,7 @@
|
||||
},
|
||||
"filename": {
|
||||
"description": "Path to the image or video to upload.",
|
||||
"example": "{example_image_path}",
|
||||
"example": "/config/www/image.jpg",
|
||||
"name": "Filename"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -53,7 +53,7 @@ from homeassistant.helpers.issue_registry import (
|
||||
async_create_issue,
|
||||
async_delete_issue,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_IGNORE_NON_NUMERIC, DOMAIN
|
||||
from .entity import GroupEntity
|
||||
@@ -374,7 +374,7 @@ class SensorGroup(GroupEntity, SensorEntity):
|
||||
def async_update_group_state(self) -> None:
|
||||
"""Query all members and determine the sensor group state."""
|
||||
self.calculate_state_attributes(self._get_valid_entities())
|
||||
states: list[StateType] = []
|
||||
states: list[str] = []
|
||||
valid_units = self._valid_units
|
||||
valid_states: list[bool] = []
|
||||
sensor_values: list[tuple[str, float, State]] = []
|
||||
|
||||
@@ -37,7 +37,6 @@ def get_device_list_classic(
|
||||
login_response = api.login(config[CONF_USERNAME], config[CONF_PASSWORD])
|
||||
# DEBUG: Log the actual response structure
|
||||
except Exception as ex:
|
||||
_LOGGER.error("DEBUG - Login response: %s", login_response)
|
||||
raise ConfigEntryError(
|
||||
f"Error communicating with Growatt API during login: {ex}"
|
||||
) from ex
|
||||
|
||||
@@ -113,9 +113,6 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
min_settings = self.api.min_settings(self.device_id)
|
||||
min_energy = self.api.min_energy(self.device_id)
|
||||
except growattServer.GrowattV1ApiError as err:
|
||||
_LOGGER.error(
|
||||
"Error fetching min device data for %s: %s", self.device_id, err
|
||||
)
|
||||
raise UpdateFailed(f"Error fetching min device data: {err}") from err
|
||||
|
||||
min_info = {**min_details, **min_settings, **min_energy}
|
||||
@@ -180,7 +177,6 @@ class GrowattCoordinator(DataUpdateCoordinator[dict[str, Any]]):
|
||||
try:
|
||||
return await self.hass.async_add_executor_job(self._sync_update_data)
|
||||
except json.decoder.JSONDecodeError as err:
|
||||
_LOGGER.error("Unable to fetch data from Growatt server: %s", err)
|
||||
raise UpdateFailed(f"Error fetching data: {err}") from err
|
||||
|
||||
def get_currency(self):
|
||||
|
||||
homeassistant/components/growatt_server/quality_scale.yaml (new file, 74 lines)
@@ -0,0 +1,74 @@
rules:
|
||||
# Bronze
|
||||
action-setup: done
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow:
|
||||
status: todo
|
||||
comment: data-descriptions missing
|
||||
dependency-transparency: done
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: done
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: todo
|
||||
comment: Update server URL dropdown to show regional descriptions (e.g., 'China', 'United States') instead of raw URLs.
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable:
|
||||
status: todo
|
||||
comment: Replace bare Exception catches in __init__.py with specific growattServer exceptions.
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
status: todo
|
||||
comment: Add serial_number field to DeviceInfo in sensor, number, and switch platforms using device_id/serial_id.
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category:
|
||||
status: todo
|
||||
comment: Add EntityCategory.DIAGNOSTIC to temperature and other diagnostic sensors. Merge GrowattRequiredKeysMixin into GrowattSensorEntityDescription using kw_only=True.
|
||||
entity-device-class:
|
||||
status: todo
|
||||
comment: Replace custom precision field with suggested_display_precision to preserve full data granularity.
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations: todo
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: Integration does not raise repairable issues.
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: todo
|
||||
inject-websession: todo
|
||||
strict-typing: todo
|
||||
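The entity-category item above proposes merging GrowattRequiredKeysMixin into GrowattSensorEntityDescription using kw_only=True. A sketch of what that merge could look like; only api_key is visible in the sensor-type hunks below, so any other mixin fields are assumed to keep their current defaults, and the precision swap follows the entity-device-class note:

# Sketch only: folds the required api_key field into the description class
# instead of keeping a separate mixin, as the quality_scale comment proposes.
from dataclasses import dataclass

from homeassistant.components.sensor import SensorEntityDescription


@dataclass(frozen=True, kw_only=True)
class GrowattSensorEntityDescription(SensorEntityDescription):
    """Describes a Growatt sensor entity (sketch of the proposed merge)."""

    # Previously provided by GrowattRequiredKeysMixin; kw_only=True allows a
    # required field to follow the optional fields inherited from the base.
    api_key: str
    # Other optional fields of the current description class would keep their
    # defaults here; the custom precision field would be dropped in favor of
    # the built-in suggested_display_precision.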
@@ -27,7 +27,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="eBatChargeToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_battery_charge_lifetime",
|
||||
@@ -43,7 +42,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="eBatDisChargeToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_battery_discharge_lifetime",
|
||||
@@ -59,7 +57,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="epvToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_solar_generation_lifetime",
|
||||
@@ -75,7 +72,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pDischarge1",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_battery_voltage",
|
||||
@@ -105,7 +101,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="elocalLoadToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_load_consumption_lifetime",
|
||||
@@ -121,7 +116,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="etoGridToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_export_to_grid_lifetime",
|
||||
@@ -138,7 +132,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="chargePower",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_load_consumption",
|
||||
@@ -146,7 +139,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pLocalLoad",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_wattage_pv_1",
|
||||
@@ -154,7 +146,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pPv1",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_wattage_pv_2",
|
||||
@@ -162,7 +153,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pPv2",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_wattage_pv_all",
|
||||
@@ -170,7 +160,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_export_to_grid",
|
||||
@@ -178,7 +167,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pactogrid",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_import_from_grid",
|
||||
@@ -186,7 +174,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pactouser",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_battery_discharge_kw",
|
||||
@@ -194,7 +181,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pdisCharge1",
|
||||
native_unit_of_measurement=UnitOfPower.KILO_WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_grid_voltage",
|
||||
@@ -210,7 +196,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="eCharge",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_load_consumption_solar_today",
|
||||
@@ -218,7 +203,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="eChargeToday",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_self_consumption_today",
|
||||
@@ -226,7 +210,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="eChargeToday1",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_load_consumption_battery_today",
|
||||
@@ -234,7 +217,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="echarge1",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="mix_import_from_grid_today",
|
||||
@@ -242,7 +224,6 @@ MIX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="etouser",
|
||||
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
|
||||
device_class=SensorDeviceClass.ENERGY,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
),
|
||||
# This sensor is manually created using the most recent X-Axis value from the chartData
|
||||
GrowattSensorEntityDescription(
|
||||
|
||||
@@ -79,7 +79,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv1",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -123,7 +122,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv2",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -167,7 +165,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv3",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -211,7 +208,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv4",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -238,7 +234,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="ppv",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -263,7 +258,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pac",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -329,7 +323,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="bdc1DischargePower",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_battery_1_discharge_total",
|
||||
@@ -346,7 +339,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="bdc2DischargePower",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_battery_2_discharge_total",
|
||||
@@ -380,7 +372,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="bdc1ChargePower",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_battery_1_charge_total",
|
||||
@@ -397,7 +388,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="bdc2ChargePower",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
key="tlx_battery_2_charge_total",
|
||||
@@ -455,7 +445,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pacToLocalLoad",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -464,7 +453,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pacToUserTotal",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -473,7 +461,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pacToGridTotal",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -558,7 +545,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="psystem",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
GrowattSensorEntityDescription(
|
||||
@@ -567,7 +553,6 @@ TLX_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="pself",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
precision=1,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -50,6 +50,5 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
|
||||
api_key="nominalPower",
|
||||
native_unit_of_measurement=UnitOfPower.WATT,
|
||||
device_class=SensorDeviceClass.POWER,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
),
|
||||
)
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/hanna",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["hanna-cloud==0.0.7"]
|
||||
"requirements": ["hanna-cloud==0.0.6"]
|
||||
}
|
||||
|
||||
@@ -211,7 +211,7 @@ async def ws_start_preview(
|
||||
|
||||
@callback
|
||||
def async_preview_updated(
|
||||
last_exception: Exception | None, state: str, attributes: Mapping[str, Any]
|
||||
last_exception: BaseException | None, state: str, attributes: Mapping[str, Any]
|
||||
) -> None:
|
||||
"""Forward config entry state events to websocket."""
|
||||
if last_exception:
|
||||
|
||||
@@ -241,7 +241,9 @@ class HistoryStatsSensor(HistoryStatsSensorBase):
|
||||
|
||||
async def async_start_preview(
|
||||
self,
|
||||
preview_callback: Callable[[Exception | None, str, Mapping[str, Any]], None],
|
||||
preview_callback: Callable[
|
||||
[BaseException | None, str, Mapping[str, Any]], None
|
||||
],
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Render a preview."""
|
||||
|
||||
|
||||
@@ -23,6 +23,6 @@
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aiohomeconnect"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aiohomeconnect==0.23.1"],
|
||||
"requirements": ["aiohomeconnect==0.24.0"],
|
||||
"zeroconf": ["_homeconnect._tcp.local."]
|
||||
}
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
import logging
|
||||
|
||||
from HueBLE import ConnectionError, HueBleError, HueBleLight
|
||||
from HueBLE import HueBleLight
|
||||
|
||||
from homeassistant.components.bluetooth import (
|
||||
async_ble_device_from_address,
|
||||
@@ -38,15 +38,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: HueBLEConfigEntry) -> bo
|
||||
|
||||
light = HueBleLight(ble_device)
|
||||
|
||||
try:
|
||||
await light.connect()
|
||||
await light.poll_state()
|
||||
except ConnectionError as e:
|
||||
raise ConfigEntryNotReady("Device found but unable to connect.") from e
|
||||
except HueBleError as e:
|
||||
raise ConfigEntryNotReady(
|
||||
"Device found and connected but unable to poll values from it."
|
||||
) from e
|
||||
if not await light.connect() or not await light.poll_state():
|
||||
raise ConfigEntryNotReady("Device found but unable to connect.")
|
||||
|
||||
entry.runtime_data = light
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ from enum import Enum
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from HueBLE import ConnectionError, HueBleError, HueBleLight, PairingError
|
||||
from HueBLE import HueBleLight
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import bluetooth
|
||||
@@ -20,7 +20,7 @@ from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers import device_registry as dr
|
||||
|
||||
from .const import DOMAIN, URL_FACTORY_RESET, URL_PAIRING_MODE
|
||||
from .const import DOMAIN, URL_PAIRING_MODE
|
||||
from .light import get_available_color_modes
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
@@ -41,22 +41,32 @@ async def validate_input(hass: HomeAssistant, address: str) -> Error | None:
|
||||
|
||||
try:
|
||||
light = HueBleLight(ble_device)
|
||||
await light.connect()
|
||||
get_available_color_modes(light)
|
||||
await light.poll_state()
|
||||
|
||||
except ConnectionError as e:
|
||||
_LOGGER.exception("Error connecting to light")
|
||||
return (
|
||||
Error.INVALID_AUTH
|
||||
if type(e.__cause__) is PairingError
|
||||
else Error.CANNOT_CONNECT
|
||||
)
|
||||
except HueBleError:
|
||||
await light.connect()
|
||||
|
||||
if light.authenticated is None:
|
||||
_LOGGER.warning(
|
||||
"Unable to determine if light authenticated, proceeding anyway"
|
||||
)
|
||||
elif not light.authenticated:
|
||||
return Error.INVALID_AUTH
|
||||
|
||||
if not light.connected:
|
||||
return Error.CANNOT_CONNECT
|
||||
|
||||
try:
|
||||
get_available_color_modes(light)
|
||||
except HomeAssistantError:
|
||||
return Error.NOT_SUPPORTED
|
||||
|
||||
_, errors = await light.poll_state()
|
||||
if len(errors) != 0:
|
||||
_LOGGER.warning("Errors raised when connecting to light: %s", errors)
|
||||
return Error.CANNOT_CONNECT
|
||||
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected error validating light connection")
|
||||
return Error.UNKNOWN
|
||||
except HomeAssistantError:
|
||||
return Error.NOT_SUPPORTED
|
||||
else:
|
||||
return None
|
||||
finally:
|
||||
@@ -119,7 +129,6 @@ class HueBleConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
CONF_NAME: self._discovery_info.name,
|
||||
CONF_MAC: self._discovery_info.address,
|
||||
"url_pairing_mode": URL_PAIRING_MODE,
|
||||
"url_factory_reset": URL_FACTORY_RESET,
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@@ -2,4 +2,3 @@
|
||||
|
||||
DOMAIN = "hue_ble"
|
||||
URL_PAIRING_MODE = "https://www.home-assistant.io/integrations/hue_ble#initial-setup"
|
||||
URL_FACTORY_RESET = "https://www.philips-hue.com/en-gb/support/article/how-to-factory-reset-philips-hue-lights/000004"
|
||||
|
||||
@@ -113,7 +113,7 @@ class HueBLELight(LightEntity):
|
||||
|
||||
async def async_update(self) -> None:
|
||||
"""Fetch latest state from light and make available via properties."""
|
||||
await self._api.poll_state()
|
||||
await self._api.poll_state(run_callbacks=True)
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Set properties then turn the light on."""
|
||||
|
||||
@@ -15,5 +15,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["bleak", "HueBLE"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["HueBLE==2.1.0"]
|
||||
"requirements": ["HueBLE==1.0.8"]
|
||||
}
|
||||
|
||||
@@ -14,7 +14,7 @@
|
||||
},
|
||||
"step": {
|
||||
"confirm": {
|
||||
"description": "Do you want to set up {name} ({mac})?. Make sure the light is [made discoverable to voice assistants]({url_pairing_mode}) or has been [factory reset]({url_factory_reset})."
|
||||
"description": "Do you want to set up {name} ({mac})?. Make sure the light is [made discoverable to voice assistants]({url_pairing_mode})."
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -94,6 +94,8 @@ SERVICE_KNX_EVENT_REGISTER: Final = "event_register"
|
||||
SERVICE_KNX_EXPOSURE_REGISTER: Final = "exposure_register"
|
||||
SERVICE_KNX_READ: Final = "read"
|
||||
|
||||
REPAIR_ISSUE_DATA_SECURE_GROUP_KEY: Final = "data_secure_group_key_issue"
|
||||
|
||||
|
||||
class KNXConfigEntryData(TypedDict, total=False):
|
||||
"""Config entry for the KNX integration."""
|
||||
@@ -162,8 +164,11 @@ SUPPORTED_PLATFORMS_UI: Final = {
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.CLIMATE,
|
||||
Platform.COVER,
|
||||
Platform.DATE,
|
||||
Platform.DATETIME,
|
||||
Platform.LIGHT,
|
||||
Platform.SWITCH,
|
||||
Platform.TIME,
|
||||
}
|
||||
|
||||
# Map KNX controller modes to HA modes. This list might not be complete.
|
||||
|
||||
@@ -3,8 +3,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date as dt_date
|
||||
from typing import Any
|
||||
|
||||
from xknx import XKNX
|
||||
from xknx.devices import DateDevice as XknxDateDevice
|
||||
from xknx.dpt.dpt_11 import KNXDate as XKNXDate
|
||||
|
||||
@@ -18,7 +18,10 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
@@ -26,11 +29,14 @@ from .const import (
|
||||
CONF_RESPOND_TO_READ,
|
||||
CONF_STATE_ADDRESS,
|
||||
CONF_SYNC_STATE,
|
||||
DOMAIN,
|
||||
KNX_ADDRESS,
|
||||
KNX_MODULE_KEY,
|
||||
)
|
||||
from .entity import KnxYamlEntity
|
||||
from .entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity
|
||||
from .knx_module import KNXModule
|
||||
from .storage.const import CONF_ENTITY, CONF_GA_DATE
|
||||
from .storage.util import ConfigExtractor
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -40,40 +46,36 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up entities for KNX platform."""
|
||||
knx_module = hass.data[KNX_MODULE_KEY]
|
||||
config: list[ConfigType] = knx_module.config_yaml[Platform.DATE]
|
||||
|
||||
async_add_entities(
|
||||
KNXDateEntity(knx_module, entity_config) for entity_config in config
|
||||
platform = async_get_current_platform()
|
||||
knx_module.config_store.add_platform(
|
||||
platform=Platform.DATE,
|
||||
controller=KnxUiEntityPlatformController(
|
||||
knx_module=knx_module,
|
||||
entity_platform=platform,
|
||||
entity_class=KnxUiDate,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateDevice:
|
||||
"""Return a XKNX DateTime object to be used within XKNX."""
|
||||
return XknxDateDevice(
|
||||
xknx,
|
||||
name=config[CONF_NAME],
|
||||
localtime=False,
|
||||
group_address=config[KNX_ADDRESS],
|
||||
group_address_state=config.get(CONF_STATE_ADDRESS),
|
||||
respond_to_read=config[CONF_RESPOND_TO_READ],
|
||||
sync_state=config[CONF_SYNC_STATE],
|
||||
)
|
||||
entities: list[KnxYamlEntity | KnxUiEntity] = []
|
||||
if yaml_platform_config := knx_module.config_yaml.get(Platform.DATE):
|
||||
entities.extend(
|
||||
KnxYamlDate(knx_module, entity_config)
|
||||
for entity_config in yaml_platform_config
|
||||
)
|
||||
if ui_config := knx_module.config_store.data["entities"].get(Platform.DATE):
|
||||
entities.extend(
|
||||
KnxUiDate(knx_module, unique_id, config)
|
||||
for unique_id, config in ui_config.items()
|
||||
)
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class KNXDateEntity(KnxYamlEntity, DateEntity, RestoreEntity):
|
||||
class _KNXDate(DateEntity, RestoreEntity):
|
||||
"""Representation of a KNX date."""
|
||||
|
||||
_device: XknxDateDevice
|
||||
|
||||
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
|
||||
"""Initialize a KNX time."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
device=_create_xknx_device(knx_module.xknx, config),
|
||||
)
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
self._attr_unique_id = str(self._device.remote_value.group_address)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
await super().async_added_to_hass()
|
||||
@@ -94,3 +96,52 @@ class KNXDateEntity(KnxYamlEntity, DateEntity, RestoreEntity):
|
||||
async def async_set_value(self, value: dt_date) -> None:
|
||||
"""Change the value."""
|
||||
await self._device.set(value)
|
||||
|
||||
|
||||
class KnxYamlDate(_KNXDate, KnxYamlEntity):
|
||||
"""Representation of a KNX date configured from YAML."""
|
||||
|
||||
_device: XknxDateDevice
|
||||
|
||||
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
|
||||
"""Initialize a KNX date."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
device=XknxDateDevice(
|
||||
knx_module.xknx,
|
||||
name=config[CONF_NAME],
|
||||
localtime=False,
|
||||
group_address=config[KNX_ADDRESS],
|
||||
group_address_state=config.get(CONF_STATE_ADDRESS),
|
||||
respond_to_read=config[CONF_RESPOND_TO_READ],
|
||||
sync_state=config[CONF_SYNC_STATE],
|
||||
),
|
||||
)
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
self._attr_unique_id = str(self._device.remote_value.group_address)
|
||||
|
||||
|
||||
class KnxUiDate(_KNXDate, KnxUiEntity):
|
||||
"""Representation of a KNX date configured from the UI."""
|
||||
|
||||
_device: XknxDateDevice
|
||||
|
||||
def __init__(
|
||||
self, knx_module: KNXModule, unique_id: str, config: dict[str, Any]
|
||||
) -> None:
|
||||
"""Initialize KNX date."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
unique_id=unique_id,
|
||||
entity_config=config[CONF_ENTITY],
|
||||
)
|
||||
knx_conf = ConfigExtractor(config[DOMAIN])
|
||||
self._device = XknxDateDevice(
|
||||
knx_module.xknx,
|
||||
name=config[CONF_ENTITY][CONF_NAME],
|
||||
localtime=False,
|
||||
group_address=knx_conf.get_write(CONF_GA_DATE),
|
||||
group_address_state=knx_conf.get_state_and_passive(CONF_GA_DATE),
|
||||
respond_to_read=knx_conf.get(CONF_RESPOND_TO_READ),
|
||||
sync_state=knx_conf.get(CONF_SYNC_STATE),
|
||||
)
|
||||
|
||||
@@ -3,8 +3,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Any
|
||||
|
||||
from xknx import XKNX
|
||||
from xknx.devices import DateTimeDevice as XknxDateTimeDevice
|
||||
from xknx.dpt.dpt_19 import KNXDateTime as XKNXDateTime
|
||||
|
||||
@@ -18,7 +18,10 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import dt as dt_util
|
||||
@@ -27,11 +30,14 @@ from .const import (
|
||||
CONF_RESPOND_TO_READ,
|
||||
CONF_STATE_ADDRESS,
|
||||
CONF_SYNC_STATE,
|
||||
DOMAIN,
|
||||
KNX_ADDRESS,
|
||||
KNX_MODULE_KEY,
|
||||
)
|
||||
from .entity import KnxYamlEntity
|
||||
from .entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity
|
||||
from .knx_module import KNXModule
|
||||
from .storage.const import CONF_ENTITY, CONF_GA_DATETIME
|
||||
from .storage.util import ConfigExtractor
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -41,40 +47,36 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up entities for KNX platform."""
|
||||
knx_module = hass.data[KNX_MODULE_KEY]
|
||||
config: list[ConfigType] = knx_module.config_yaml[Platform.DATETIME]
|
||||
|
||||
async_add_entities(
|
||||
KNXDateTimeEntity(knx_module, entity_config) for entity_config in config
|
||||
platform = async_get_current_platform()
|
||||
knx_module.config_store.add_platform(
|
||||
platform=Platform.DATETIME,
|
||||
controller=KnxUiEntityPlatformController(
|
||||
knx_module=knx_module,
|
||||
entity_platform=platform,
|
||||
entity_class=KnxUiDateTime,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxDateTimeDevice:
|
||||
"""Return a XKNX DateTime object to be used within XKNX."""
|
||||
return XknxDateTimeDevice(
|
||||
xknx,
|
||||
name=config[CONF_NAME],
|
||||
localtime=False,
|
||||
group_address=config[KNX_ADDRESS],
|
||||
group_address_state=config.get(CONF_STATE_ADDRESS),
|
||||
respond_to_read=config[CONF_RESPOND_TO_READ],
|
||||
sync_state=config[CONF_SYNC_STATE],
|
||||
)
|
||||
entities: list[KnxYamlEntity | KnxUiEntity] = []
|
||||
if yaml_platform_config := knx_module.config_yaml.get(Platform.DATETIME):
|
||||
entities.extend(
|
||||
KnxYamlDateTime(knx_module, entity_config)
|
||||
for entity_config in yaml_platform_config
|
||||
)
|
||||
if ui_config := knx_module.config_store.data["entities"].get(Platform.DATETIME):
|
||||
entities.extend(
|
||||
KnxUiDateTime(knx_module, unique_id, config)
|
||||
for unique_id, config in ui_config.items()
|
||||
)
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class KNXDateTimeEntity(KnxYamlEntity, DateTimeEntity, RestoreEntity):
|
||||
class _KNXDateTime(DateTimeEntity, RestoreEntity):
|
||||
"""Representation of a KNX datetime."""
|
||||
|
||||
_device: XknxDateTimeDevice
|
||||
|
||||
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
|
||||
"""Initialize a KNX time."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
device=_create_xknx_device(knx_module.xknx, config),
|
||||
)
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
self._attr_unique_id = str(self._device.remote_value.group_address)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
await super().async_added_to_hass()
|
||||
@@ -99,3 +101,52 @@ class KNXDateTimeEntity(KnxYamlEntity, DateTimeEntity, RestoreEntity):
|
||||
async def async_set_value(self, value: datetime) -> None:
|
||||
"""Change the value."""
|
||||
await self._device.set(value.astimezone(dt_util.get_default_time_zone()))
|
||||
|
||||
|
||||
class KnxYamlDateTime(_KNXDateTime, KnxYamlEntity):
|
||||
"""Representation of a KNX datetime configured from YAML."""
|
||||
|
||||
_device: XknxDateTimeDevice
|
||||
|
||||
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
|
||||
"""Initialize a KNX datetime."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
device=XknxDateTimeDevice(
|
||||
knx_module.xknx,
|
||||
name=config[CONF_NAME],
|
||||
localtime=False,
|
||||
group_address=config[KNX_ADDRESS],
|
||||
group_address_state=config.get(CONF_STATE_ADDRESS),
|
||||
respond_to_read=config[CONF_RESPOND_TO_READ],
|
||||
sync_state=config[CONF_SYNC_STATE],
|
||||
),
|
||||
)
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
self._attr_unique_id = str(self._device.remote_value.group_address)
|
||||
|
||||
|
||||
class KnxUiDateTime(_KNXDateTime, KnxUiEntity):
|
||||
"""Representation of a KNX datetime configured from the UI."""
|
||||
|
||||
_device: XknxDateTimeDevice
|
||||
|
||||
def __init__(
|
||||
self, knx_module: KNXModule, unique_id: str, config: dict[str, Any]
|
||||
) -> None:
|
||||
"""Initialize KNX datetime."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
unique_id=unique_id,
|
||||
entity_config=config[CONF_ENTITY],
|
||||
)
|
||||
knx_conf = ConfigExtractor(config[DOMAIN])
|
||||
self._device = XknxDateTimeDevice(
|
||||
knx_module.xknx,
|
||||
name=config[CONF_ENTITY][CONF_NAME],
|
||||
localtime=False,
|
||||
group_address=knx_conf.get_write(CONF_GA_DATETIME),
|
||||
group_address_state=knx_conf.get_state_and_passive(CONF_GA_DATETIME),
|
||||
respond_to_read=knx_conf.get(CONF_RESPOND_TO_READ),
|
||||
sync_state=knx_conf.get(CONF_SYNC_STATE),
|
||||
)
|
||||
|
||||
@@ -77,6 +77,11 @@ class _KnxEntityBase(Entity):
         """Store register state change callback and start device object."""
         self._device.register_device_updated_cb(self.after_update_callback)
         self._device.xknx.devices.async_add(self._device)
+        if uid := self.unique_id:
+            self._knx_module.add_to_group_address_entities(
+                group_addresses=self._device.group_addresses(),
+                identifier=(self.platform_data.domain, uid),
+            )
         # super call needed to have methods of multi-inherited classes called
         # eg. for restoring state (like _KNXSwitch)
         await super().async_added_to_hass()
@@ -85,6 +90,11 @@ class _KnxEntityBase(Entity):
         """Disconnect device object when removed."""
         self._device.unregister_device_updated_cb(self.after_update_callback)
         self._device.xknx.devices.async_remove(self._device)
+        if uid := self.unique_id:
+            self._knx_module.remove_from_group_address_entities(
+                group_addresses=self._device.group_addresses(),
+                identifier=(self.platform_data.domain, uid),
+            )


 class KnxYamlEntity(_KnxEntityBase):

@@ -56,6 +56,7 @@ from .const import (
 from .device import KNXInterfaceDevice
 from .expose import KNXExposeSensor, KNXExposeTime
 from .project import KNXProject
+from .repairs import data_secure_group_key_issue_dispatcher
 from .storage.config_store import KNXConfigStore
 from .telegrams import Telegrams

@@ -107,8 +108,12 @@ class KNXModule:

         self._address_filter_transcoder: dict[AddressFilter, type[DPTBase]] = {}
         self.group_address_transcoder: dict[DeviceGroupAddress, type[DPTBase]] = {}
+        self.group_address_entities: dict[
+            DeviceGroupAddress, set[tuple[str, str]]  # {(platform, unique_id),}
+        ] = {}
         self.knx_event_callback: TelegramQueue.Callback = self.register_event_callback()

+        self.entry.async_on_unload(data_secure_group_key_issue_dispatcher(self))
         self.entry.async_on_unload(
             self.hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self.stop)
         )
@@ -225,6 +230,29 @@ class KNXModule:
             threaded=True,
         )

+    def add_to_group_address_entities(
+        self,
+        group_addresses: set[DeviceGroupAddress],
+        identifier: tuple[str, str],  # (platform, unique_id)
+    ) -> None:
+        """Register entity in group_address_entities map."""
+        for ga in group_addresses:
+            if ga not in self.group_address_entities:
+                self.group_address_entities[ga] = set()
+            self.group_address_entities[ga].add(identifier)
+
+    def remove_from_group_address_entities(
+        self,
+        group_addresses: set[DeviceGroupAddress],
+        identifier: tuple[str, str],
+    ) -> None:
+        """Unregister entity from group_address_entities map."""
+        for ga in group_addresses:
+            if ga in self.group_address_entities:
+                self.group_address_entities[ga].discard(identifier)
+                if not self.group_address_entities[ga]:
+                    del self.group_address_entities[ga]
+
     def connection_state_changed_cb(self, state: XknxConnectionState) -> None:
         """Call invoked after a KNX connection state change was received."""
         self.connected = state == XknxConnectionState.CONNECTED

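For orientation, here is a minimal standalone sketch of the bookkeeping the two new KNXModule methods perform. Plain group-address strings and made-up identifiers stand in for xknx DeviceGroupAddress objects and real entity unique IDs; the real methods operate on the module's group_address_entities attribute shown in the hunk above. The map lets the repairs handler check in constant time whether an undecodable telegram targets a group address Home Assistant actually uses.

```python
# Standalone sketch: plain strings ("1/2/3") and made-up identifiers stand in
# for xknx DeviceGroupAddress objects and real entity unique_ids.
group_address_entities: dict[str, set[tuple[str, str]]] = {}


def add_entity(group_addresses: set[str], identifier: tuple[str, str]) -> None:
    """Register an entity for every group address it uses."""
    for ga in group_addresses:
        group_address_entities.setdefault(ga, set()).add(identifier)


def remove_entity(group_addresses: set[str], identifier: tuple[str, str]) -> None:
    """Unregister an entity and drop group addresses that become unused."""
    for ga in group_addresses:
        if ga in group_address_entities:
            group_address_entities[ga].discard(identifier)
            if not group_address_entities[ga]:
                del group_address_entities[ga]


add_entity({"1/2/3", "1/2/4"}, ("date", "knx_es_01"))
add_entity({"1/2/3"}, ("switch", "knx_es_02"))
assert group_address_entities["1/2/3"] == {("date", "knx_es_01"), ("switch", "knx_es_02")}

remove_entity({"1/2/3", "1/2/4"}, ("date", "knx_es_01"))
assert group_address_entities == {"1/2/3": {("switch", "knx_es_02")}}
```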
@@ -11,7 +11,7 @@
   "loggers": ["xknx", "xknxproject"],
   "quality_scale": "silver",
   "requirements": [
-    "xknx==3.11.0",
+    "xknx==3.12.0",
     "xknxproject==3.8.2",
     "knx-frontend==2025.10.31.195356"
   ],

homeassistant/components/knx/repairs.py (new file, 175 lines)
@@ -0,0 +1,175 @@
|
||||
"""Repairs for KNX integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from functools import partial
|
||||
from typing import TYPE_CHECKING, Any, Final
|
||||
|
||||
import voluptuous as vol
|
||||
from xknx.exceptions.exception import InvalidSecureConfiguration
|
||||
from xknx.telegram import GroupAddress, IndividualAddress, Telegram
|
||||
|
||||
from homeassistant import data_entry_flow
|
||||
from homeassistant.components.repairs import RepairsFlow
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import issue_registry as ir, selector
|
||||
from homeassistant.helpers.dispatcher import async_dispatcher_connect
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .knx_module import KNXModule
|
||||
|
||||
from .const import (
|
||||
CONF_KNX_KNXKEY_PASSWORD,
|
||||
DOMAIN,
|
||||
REPAIR_ISSUE_DATA_SECURE_GROUP_KEY,
|
||||
KNXConfigEntryData,
|
||||
)
|
||||
from .storage.keyring import DEFAULT_KNX_KEYRING_FILENAME, save_uploaded_knxkeys_file
|
||||
from .telegrams import SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM, TelegramDict
|
||||
|
||||
CONF_KEYRING_FILE: Final = "knxkeys_file"
|
||||
|
||||
|
||||
async def async_create_fix_flow(
|
||||
hass: HomeAssistant,
|
||||
issue_id: str,
|
||||
data: dict[str, str | int | float | None] | None,
|
||||
) -> RepairsFlow:
|
||||
"""Create flow."""
|
||||
if issue_id == REPAIR_ISSUE_DATA_SECURE_GROUP_KEY:
|
||||
return DataSecureGroupIssueRepairFlow()
|
||||
# If KNX adds confirm-only repairs in the future, this should be changed
|
||||
# to return a ConfirmRepairFlow instead of raising a ValueError
|
||||
raise ValueError(f"unknown repair {issue_id}")
|
||||
|
||||
|
||||
######################
|
||||
# DataSecure key issue
|
||||
######################
|
||||
|
||||
|
||||
@callback
|
||||
def data_secure_group_key_issue_dispatcher(knx_module: KNXModule) -> Callable[[], None]:
|
||||
"""Watcher for DataSecure group key issues."""
|
||||
return async_dispatcher_connect(
|
||||
knx_module.hass,
|
||||
signal=SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM,
|
||||
target=partial(_data_secure_group_key_issue_handler, knx_module),
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def _data_secure_group_key_issue_handler(
|
||||
knx_module: KNXModule, telegram: Telegram, telegram_dict: TelegramDict
|
||||
) -> None:
|
||||
"""Handle DataSecure group key issue telegrams."""
|
||||
if telegram.destination_address not in knx_module.group_address_entities:
|
||||
# Only report issues for configured group addresses
|
||||
return
|
||||
|
||||
issue_registry = ir.async_get(knx_module.hass)
|
||||
new_ga = str(telegram.destination_address)
|
||||
new_ia = str(telegram.source_address)
|
||||
new_data = {new_ga: new_ia}
|
||||
|
||||
if existing_issue := issue_registry.async_get_issue(
|
||||
DOMAIN, REPAIR_ISSUE_DATA_SECURE_GROUP_KEY
|
||||
):
|
||||
assert isinstance(existing_issue.data, dict)
|
||||
existing_data: dict[str, str] = existing_issue.data # type: ignore[assignment]
|
||||
if new_ga in existing_data:
|
||||
current_ias = existing_data[new_ga].split(", ")
|
||||
if new_ia in current_ias:
|
||||
return
|
||||
current_ias = sorted([*current_ias, new_ia], key=IndividualAddress)
|
||||
new_data[new_ga] = ", ".join(current_ias)
|
||||
new_data_unsorted = existing_data | new_data
|
||||
new_data = {
|
||||
key: new_data_unsorted[key]
|
||||
for key in sorted(new_data_unsorted, key=GroupAddress)
|
||||
}
|
||||
|
||||
issue_registry.async_get_or_create(
|
||||
DOMAIN,
|
||||
REPAIR_ISSUE_DATA_SECURE_GROUP_KEY,
|
||||
data=new_data, # type: ignore[arg-type]
|
||||
is_fixable=True,
|
||||
is_persistent=True,
|
||||
severity=ir.IssueSeverity.ERROR,
|
||||
translation_key=REPAIR_ISSUE_DATA_SECURE_GROUP_KEY,
|
||||
translation_placeholders={
|
||||
"addresses": "\n".join(
|
||||
f"`{ga}` from {ias}" for ga, ias in new_data.items()
|
||||
),
|
||||
"interface": str(knx_module.xknx.current_address),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
class DataSecureGroupIssueRepairFlow(RepairsFlow):
|
||||
"""Handler for an issue fixing flow for outdated DataSecure keys."""
|
||||
|
||||
@callback
|
||||
def _async_get_placeholders(self) -> dict[str, str]:
|
||||
issue_registry = ir.async_get(self.hass)
|
||||
issue = issue_registry.async_get_issue(self.handler, self.issue_id)
|
||||
assert issue is not None
|
||||
return issue.translation_placeholders or {}
|
||||
|
||||
async def async_step_init(
|
||||
self, user_input: dict[str, str] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Handle the first step of a fix flow."""
|
||||
return await self.async_step_secure_knxkeys()
|
||||
|
||||
async def async_step_secure_knxkeys(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Manage upload of new KNX Keyring file."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
password = user_input[CONF_KNX_KNXKEY_PASSWORD]
|
||||
keyring = None
|
||||
try:
|
||||
keyring = await save_uploaded_knxkeys_file(
|
||||
self.hass,
|
||||
uploaded_file_id=user_input[CONF_KEYRING_FILE],
|
||||
password=password,
|
||||
)
|
||||
except InvalidSecureConfiguration:
|
||||
errors[CONF_KNX_KNXKEY_PASSWORD] = "keyfile_invalid_signature"
|
||||
|
||||
if not errors and keyring:
|
||||
new_entry_data = KNXConfigEntryData(
|
||||
knxkeys_filename=f"{DOMAIN}/{DEFAULT_KNX_KEYRING_FILENAME}",
|
||||
knxkeys_password=password,
|
||||
)
|
||||
return self.finish_flow(new_entry_data)
|
||||
|
||||
fields = {
|
||||
vol.Required(CONF_KEYRING_FILE): selector.FileSelector(
|
||||
config=selector.FileSelectorConfig(accept=".knxkeys")
|
||||
),
|
||||
vol.Required(CONF_KNX_KNXKEY_PASSWORD): selector.TextSelector(),
|
||||
}
|
||||
return self.async_show_form(
|
||||
step_id="secure_knxkeys",
|
||||
data_schema=vol.Schema(fields),
|
||||
description_placeholders=self._async_get_placeholders(),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
@callback
|
||||
def finish_flow(
|
||||
self, new_entry_data: KNXConfigEntryData
|
||||
) -> data_entry_flow.FlowResult:
|
||||
"""Finish the repair flow. Reload the config entry."""
|
||||
knx_config_entries = self.hass.config_entries.async_entries(DOMAIN)
|
||||
if knx_config_entries:
|
||||
config_entry = knx_config_entries[0] # single_config_entry
|
||||
new_data = {**config_entry.data, **new_entry_data}
|
||||
self.hass.config_entries.async_update_entry(config_entry, data=new_data)
|
||||
self.hass.config_entries.async_schedule_reload(config_entry.entry_id)
|
||||
return self.async_create_entry(data={})
|
||||
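To make the merge logic in _data_secure_group_key_issue_handler easier to follow, here is a hedged, self-contained sketch of how the issue data accumulates one sender list per group address. It sorts plain strings for brevity, whereas the handler above sorts with xknx GroupAddress and IndividualAddress keys; all addresses used are invented.

```python
# Self-contained sketch; addresses are invented and sorted as plain strings,
# whereas the handler above sorts with xknx GroupAddress/IndividualAddress keys.
def merge_issue_data(
    existing: dict[str, str], group_address: str, individual_address: str
) -> dict[str, str] | None:
    """Return the updated issue data, or None if the sender is already recorded."""
    senders = existing.get(group_address, "")
    current = senders.split(", ") if senders else []
    if individual_address in current:
        return None  # no change -> the repair issue is not re-created
    merged = existing | {
        group_address: ", ".join(sorted([*current, individual_address]))
    }
    return dict(sorted(merged.items()))


data: dict[str, str] = {}
for ga, ia in [("1/2/3", "1.1.5"), ("1/2/3", "1.1.2"), ("1/0/7", "1.1.5")]:
    if (updated := merge_issue_data(data, ga, ia)) is not None:
        data = updated

assert data == {"1/0/7": "1.1.5", "1/2/3": "1.1.2, 1.1.5"}
```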
@@ -13,6 +13,9 @@ CONF_DPT: Final = "dpt"

 CONF_GA_SENSOR: Final = "ga_sensor"
 CONF_GA_SWITCH: Final = "ga_switch"
+CONF_GA_DATE: Final = "ga_date"
+CONF_GA_DATETIME: Final = "ga_datetime"
+CONF_GA_TIME: Final = "ga_time"

 # Climate
 CONF_GA_TEMPERATURE_CURRENT: Final = "ga_temperature_current"

@@ -46,6 +46,8 @@ from .const import (
     CONF_GA_COLOR_TEMP,
     CONF_GA_CONTROLLER_MODE,
     CONF_GA_CONTROLLER_STATUS,
+    CONF_GA_DATE,
+    CONF_GA_DATETIME,
     CONF_GA_FAN_SPEED,
     CONF_GA_FAN_SWING,
     CONF_GA_FAN_SWING_HORIZONTAL,
@@ -72,6 +74,7 @@ from .const import (
     CONF_GA_SWITCH,
     CONF_GA_TEMPERATURE_CURRENT,
     CONF_GA_TEMPERATURE_TARGET,
+    CONF_GA_TIME,
     CONF_GA_UP_DOWN,
     CONF_GA_VALVE,
     CONF_GA_WHITE_BRIGHTNESS,
@@ -199,6 +202,24 @@ COVER_KNX_SCHEMA = AllSerializeFirst(
     ),
 )

+DATE_KNX_SCHEMA = vol.Schema(
+    {
+        vol.Required(CONF_GA_DATE): GASelector(write_required=True, valid_dpt="11.001"),
+        vol.Optional(CONF_RESPOND_TO_READ, default=False): selector.BooleanSelector(),
+        vol.Optional(CONF_SYNC_STATE, default=True): SyncStateSelector(),
+    }
+)
+
+DATETIME_KNX_SCHEMA = vol.Schema(
+    {
+        vol.Required(CONF_GA_DATETIME): GASelector(
+            write_required=True, valid_dpt="19.001"
+        ),
+        vol.Optional(CONF_RESPOND_TO_READ, default=False): selector.BooleanSelector(),
+        vol.Optional(CONF_SYNC_STATE, default=True): SyncStateSelector(),
+    }
+)
+

 @unique
 class LightColorMode(StrEnum):
@@ -336,6 +357,14 @@ SWITCH_KNX_SCHEMA = vol.Schema(
     },
 )

+TIME_KNX_SCHEMA = vol.Schema(
+    {
+        vol.Required(CONF_GA_TIME): GASelector(write_required=True, valid_dpt="10.001"),
+        vol.Optional(CONF_RESPOND_TO_READ, default=False): selector.BooleanSelector(),
+        vol.Optional(CONF_SYNC_STATE, default=True): SyncStateSelector(),
+    }
+)
+

 @unique
 class ConfSetpointShiftMode(StrEnum):
@@ -482,8 +511,11 @@ KNX_SCHEMA_FOR_PLATFORM = {
     Platform.BINARY_SENSOR: BINARY_SENSOR_KNX_SCHEMA,
     Platform.CLIMATE: CLIMATE_KNX_SCHEMA,
     Platform.COVER: COVER_KNX_SCHEMA,
+    Platform.DATE: DATE_KNX_SCHEMA,
+    Platform.DATETIME: DATETIME_KNX_SCHEMA,
     Platform.LIGHT: LIGHT_KNX_SCHEMA,
     Platform.SWITCH: SWITCH_KNX_SCHEMA,
+    Platform.TIME: TIME_KNX_SCHEMA,
 }

 ENTITY_STORE_DATA_SCHEMA: VolSchemaType = vol.All(

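The new schemas rely on the custom GASelector and SyncStateSelector validators. As a rough illustration of how the vol.Optional defaults behave, here is a simplified stand-in that swaps those custom selectors for plain types; the selector replacement and the shape of the ga_date value are assumptions made only for this sketch.

```python
# Simplified stand-in: GASelector / SyncStateSelector are replaced by plain
# types here (assumption for brevity), just to show how the defaults behave.
import voluptuous as vol

demo_date_schema = vol.Schema(
    {
        vol.Required("ga_date"): dict,
        vol.Optional("respond_to_read", default=False): bool,
        vol.Optional("sync_state", default=True): bool,
    }
)

validated = demo_date_schema({"ga_date": {"write": "1/2/3"}})
assert validated["respond_to_read"] is False  # default filled in
assert validated["sync_state"] is True
```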
@@ -10,9 +10,10 @@ from xknx.secure.keyring import Keyring, sync_load_keyring

 from homeassistant.components.file_upload import process_uploaded_file
 from homeassistant.core import HomeAssistant
+from homeassistant.helpers import issue_registry as ir
 from homeassistant.helpers.storage import STORAGE_DIR

-from ..const import DOMAIN
+from ..const import DOMAIN, REPAIR_ISSUE_DATA_SECURE_GROUP_KEY

 _LOGGER = logging.getLogger(__name__)

@@ -45,4 +46,11 @@ async def save_uploaded_knxkeys_file(
         shutil.move(file_path, dest_file)
         return keyring

-    return await hass.async_add_executor_job(_process_upload)
+    keyring = await hass.async_add_executor_job(_process_upload)
+
+    # If there is an existing DataSecure group key issue, remove it.
+    # GAs might not be DataSecure anymore after uploading a valid keyring,
+    # if they are, we raise the issue again when receiving a telegram.
+    ir.async_delete_issue(hass, DOMAIN, REPAIR_ISSUE_DATA_SECURE_GROUP_KEY)
+
+    return keyring

@@ -176,6 +176,10 @@
|
||||
"state_address": "State address",
|
||||
"valid_dpts": "Valid DPTs"
|
||||
},
|
||||
"respond_to_read": {
|
||||
"description": "Respond to GroupValueRead telegrams received to the configured send address.",
|
||||
"label": "Respond to read"
|
||||
},
|
||||
"sync_state": {
|
||||
"description": "Actively request state updates from KNX bus for state addresses.",
|
||||
"options": {
|
||||
@@ -438,6 +442,24 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"date": {
|
||||
"description": "The KNX date platform is used as an interface to date objects.",
|
||||
"knx": {
|
||||
"ga_date": {
|
||||
"description": "The group address of the date object.",
|
||||
"label": "Date"
|
||||
}
|
||||
}
|
||||
},
|
||||
"datetime": {
|
||||
"description": "The KNX datetime platform is used as an interface to date and time objects.",
|
||||
"knx": {
|
||||
"ga_datetime": {
|
||||
"description": "The group address of the date and time object.",
|
||||
"label": "Date and time"
|
||||
}
|
||||
}
|
||||
},
|
||||
"header": "Create new entity",
|
||||
"light": {
|
||||
"description": "The KNX light platform is used as an interface to dimming actuators, LED controllers, DALI gateways and similar.",
|
||||
@@ -546,10 +568,15 @@
|
||||
"invert": {
|
||||
"description": "Invert payloads before processing or sending.",
|
||||
"label": "Invert"
|
||||
},
|
||||
"respond_to_read": {
|
||||
"description": "Respond to GroupValueRead telegrams received to the configured send address.",
|
||||
"label": "Respond to read"
|
||||
}
|
||||
}
|
||||
},
|
||||
"time": {
|
||||
"description": "The KNX time platform is used as an interface to time objects.",
|
||||
"knx": {
|
||||
"ga_time": {
|
||||
"description": "The group address of the time object.",
|
||||
"label": "Time"
|
||||
}
|
||||
}
|
||||
},
|
||||
@@ -644,6 +671,30 @@
|
||||
"message": "Invalid type for `knx.send` service: {type}"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"data_secure_group_key_issue": {
|
||||
"fix_flow": {
|
||||
"error": {
|
||||
"keyfile_invalid_signature": "[%key:component::knx::config::error::keyfile_invalid_signature%]"
|
||||
},
|
||||
"step": {
|
||||
"secure_knxkeys": {
|
||||
"data": {
|
||||
"knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_file%]",
|
||||
"knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data::knxkeys_password%]"
|
||||
},
|
||||
"data_description": {
|
||||
"knxkeys_file": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_file%]",
|
||||
"knxkeys_password": "[%key:component::knx::config::step::secure_knxkeys::data_description::knxkeys_password%]"
|
||||
},
|
||||
"description": "Telegrams for group addresses used in Home Assistant could not be decrypted because Data Secure keys are missing or invalid:\n\n{addresses}\n\nTo fix this, update the sending devices configurations via ETS and provide an updated KNX Keyring file. Make sure that the group addresses used in Home Assistant are associated with the interface used by Home Assistant (`{interface}` when the issue last occurred).",
|
||||
"title": "Update KNX Keyring"
|
||||
}
|
||||
}
|
||||
},
|
||||
"title": "KNX Data Secure telegrams can't be decrypted"
|
||||
}
|
||||
},
|
||||
"options": {
|
||||
"step": {
|
||||
"communication_settings": {
|
||||
|
||||
@@ -26,6 +26,9 @@ STORAGE_KEY: Final = f"{DOMAIN}/telegrams_history.json"

 # dispatcher signal for KNX interface device triggers
 SIGNAL_KNX_TELEGRAM: SignalType[Telegram, TelegramDict] = SignalType("knx_telegram")
+SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM: SignalType[Telegram, TelegramDict] = SignalType(
+    "knx_data_secure_issue_telegram"
+)


 class DecodedTelegramPayload(TypedDict):
@@ -74,6 +77,11 @@ class Telegrams:
                 match_for_outgoing=True,
             )
         )
+        self._xknx_data_secure_group_key_issue_cb_handle = (
+            xknx.telegram_queue.register_data_secure_group_key_issue_cb(
+                self._xknx_data_secure_group_key_issue_cb,
+            )
+        )
         self.recent_telegrams: deque[TelegramDict] = deque(maxlen=log_size)
         self.last_ga_telegrams: dict[str, TelegramDict] = {}

@@ -107,6 +115,14 @@ class Telegrams:
         self.last_ga_telegrams[telegram_dict["destination"]] = telegram_dict
         async_dispatcher_send(self.hass, SIGNAL_KNX_TELEGRAM, telegram, telegram_dict)

+    def _xknx_data_secure_group_key_issue_cb(self, telegram: Telegram) -> None:
+        """Handle telegrams with undecodable data secure payload from xknx."""
+        telegram_dict = self.telegram_to_dict(telegram)
+        self.recent_telegrams.append(telegram_dict)
+        async_dispatcher_send(
+            self.hass, SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM, telegram, telegram_dict
+        )
+
     def telegram_to_dict(self, telegram: Telegram) -> TelegramDict:
         """Convert a Telegram to a dict."""
         dst_name = ""

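Other parts of the integration can hook into the new signal with the standard dispatcher helpers, mirroring how repairs.py wires its watcher. A hedged sketch that assumes a running Home Assistant with the KNX integration set up; handle_issue and subscribe are made-up names:

```python
# Hedged sketch: assumes a running Home Assistant with the KNX integration set
# up; `handle_issue` and `subscribe` are made-up names for illustration.
from collections.abc import Callable

from homeassistant.components.knx.telegrams import (
    SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM,
    TelegramDict,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from xknx.telegram import Telegram


@callback
def handle_issue(telegram: Telegram, telegram_dict: TelegramDict) -> None:
    """React to a telegram whose Data Secure payload could not be decoded."""
    print("undecodable Data Secure telegram to", telegram_dict["destination"])


def subscribe(hass: HomeAssistant) -> Callable[[], None]:
    """Connect to the signal; the returned callable unsubscribes again."""
    return async_dispatcher_connect(
        hass,
        signal=SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM,
        target=handle_issue,
    )
```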
@@ -3,8 +3,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import time as dt_time
|
||||
from typing import Any
|
||||
|
||||
from xknx import XKNX
|
||||
from xknx.devices import TimeDevice as XknxTimeDevice
|
||||
from xknx.dpt.dpt_10 import KNXTime as XknxTime
|
||||
|
||||
@@ -18,7 +18,10 @@ from homeassistant.const import (
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.helpers.entity_platform import (
|
||||
AddConfigEntryEntitiesCallback,
|
||||
async_get_current_platform,
|
||||
)
|
||||
from homeassistant.helpers.restore_state import RestoreEntity
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
@@ -26,11 +29,14 @@ from .const import (
|
||||
CONF_RESPOND_TO_READ,
|
||||
CONF_STATE_ADDRESS,
|
||||
CONF_SYNC_STATE,
|
||||
DOMAIN,
|
||||
KNX_ADDRESS,
|
||||
KNX_MODULE_KEY,
|
||||
)
|
||||
from .entity import KnxYamlEntity
|
||||
from .entity import KnxUiEntity, KnxUiEntityPlatformController, KnxYamlEntity
|
||||
from .knx_module import KNXModule
|
||||
from .storage.const import CONF_ENTITY, CONF_GA_TIME
|
||||
from .storage.util import ConfigExtractor
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
@@ -40,40 +46,36 @@ async def async_setup_entry(
|
||||
) -> None:
|
||||
"""Set up entities for KNX platform."""
|
||||
knx_module = hass.data[KNX_MODULE_KEY]
|
||||
config: list[ConfigType] = knx_module.config_yaml[Platform.TIME]
|
||||
|
||||
async_add_entities(
|
||||
KNXTimeEntity(knx_module, entity_config) for entity_config in config
|
||||
platform = async_get_current_platform()
|
||||
knx_module.config_store.add_platform(
|
||||
platform=Platform.TIME,
|
||||
controller=KnxUiEntityPlatformController(
|
||||
knx_module=knx_module,
|
||||
entity_platform=platform,
|
||||
entity_class=KnxUiTime,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
def _create_xknx_device(xknx: XKNX, config: ConfigType) -> XknxTimeDevice:
|
||||
"""Return a XKNX DateTime object to be used within XKNX."""
|
||||
return XknxTimeDevice(
|
||||
xknx,
|
||||
name=config[CONF_NAME],
|
||||
localtime=False,
|
||||
group_address=config[KNX_ADDRESS],
|
||||
group_address_state=config.get(CONF_STATE_ADDRESS),
|
||||
respond_to_read=config[CONF_RESPOND_TO_READ],
|
||||
sync_state=config[CONF_SYNC_STATE],
|
||||
)
|
||||
entities: list[KnxYamlEntity | KnxUiEntity] = []
|
||||
if yaml_platform_config := knx_module.config_yaml.get(Platform.TIME):
|
||||
entities.extend(
|
||||
KnxYamlTime(knx_module, entity_config)
|
||||
for entity_config in yaml_platform_config
|
||||
)
|
||||
if ui_config := knx_module.config_store.data["entities"].get(Platform.TIME):
|
||||
entities.extend(
|
||||
KnxUiTime(knx_module, unique_id, config)
|
||||
for unique_id, config in ui_config.items()
|
||||
)
|
||||
if entities:
|
||||
async_add_entities(entities)
|
||||
|
||||
|
||||
class KNXTimeEntity(KnxYamlEntity, TimeEntity, RestoreEntity):
|
||||
class _KNXTime(TimeEntity, RestoreEntity):
|
||||
"""Representation of a KNX time."""
|
||||
|
||||
_device: XknxTimeDevice
|
||||
|
||||
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
|
||||
"""Initialize a KNX time."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
device=_create_xknx_device(knx_module.xknx, config),
|
||||
)
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
self._attr_unique_id = str(self._device.remote_value.group_address)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Restore last state."""
|
||||
await super().async_added_to_hass()
|
||||
@@ -94,3 +96,52 @@ class KNXTimeEntity(KnxYamlEntity, TimeEntity, RestoreEntity):
|
||||
async def async_set_value(self, value: dt_time) -> None:
|
||||
"""Change the value."""
|
||||
await self._device.set(value)
|
||||
|
||||
|
||||
class KnxYamlTime(_KNXTime, KnxYamlEntity):
|
||||
"""Representation of a KNX time configured from YAML."""
|
||||
|
||||
_device: XknxTimeDevice
|
||||
|
||||
def __init__(self, knx_module: KNXModule, config: ConfigType) -> None:
|
||||
"""Initialize a KNX time."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
device=XknxTimeDevice(
|
||||
knx_module.xknx,
|
||||
name=config[CONF_NAME],
|
||||
localtime=False,
|
||||
group_address=config[KNX_ADDRESS],
|
||||
group_address_state=config.get(CONF_STATE_ADDRESS),
|
||||
respond_to_read=config[CONF_RESPOND_TO_READ],
|
||||
sync_state=config[CONF_SYNC_STATE],
|
||||
),
|
||||
)
|
||||
self._attr_entity_category = config.get(CONF_ENTITY_CATEGORY)
|
||||
self._attr_unique_id = str(self._device.remote_value.group_address)
|
||||
|
||||
|
||||
class KnxUiTime(_KNXTime, KnxUiEntity):
|
||||
"""Representation of a KNX time configured from the UI."""
|
||||
|
||||
_device: XknxTimeDevice
|
||||
|
||||
def __init__(
|
||||
self, knx_module: KNXModule, unique_id: str, config: dict[str, Any]
|
||||
) -> None:
|
||||
"""Initialize KNX time."""
|
||||
super().__init__(
|
||||
knx_module=knx_module,
|
||||
unique_id=unique_id,
|
||||
entity_config=config[CONF_ENTITY],
|
||||
)
|
||||
knx_conf = ConfigExtractor(config[DOMAIN])
|
||||
self._device = XknxTimeDevice(
|
||||
knx_module.xknx,
|
||||
name=config[CONF_ENTITY][CONF_NAME],
|
||||
localtime=False,
|
||||
group_address=knx_conf.get_write(CONF_GA_TIME),
|
||||
group_address_state=knx_conf.get_state_and_passive(CONF_GA_TIME),
|
||||
respond_to_read=knx_conf.get(CONF_RESPOND_TO_READ),
|
||||
sync_state=knx_conf.get(CONF_SYNC_STATE),
|
||||
)
|
||||
|
||||
@@ -3,6 +3,7 @@
 from __future__ import annotations

 from collections.abc import Awaitable, Callable
+from contextlib import ExitStack
 from functools import wraps
 import inspect
 from typing import TYPE_CHECKING, Any, Final, overload
@@ -34,7 +35,11 @@ from .storage.entity_store_validation import (
     validate_entity_data,
 )
 from .storage.serialize import get_serialized_schema
-from .telegrams import SIGNAL_KNX_TELEGRAM, TelegramDict
+from .telegrams import (
+    SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM,
+    SIGNAL_KNX_TELEGRAM,
+    TelegramDict,
+)

 if TYPE_CHECKING:
     from .knx_module import KNXModule
@@ -334,11 +339,23 @@ def ws_subscribe_telegram(
             telegram_dict,
         )

-    connection.subscriptions[msg["id"]] = async_dispatcher_connect(
-        hass,
-        signal=SIGNAL_KNX_TELEGRAM,
-        target=forward_telegram,
+    stack = ExitStack()
+    stack.callback(
+        async_dispatcher_connect(
+            hass,
+            signal=SIGNAL_KNX_TELEGRAM,
+            target=forward_telegram,
+        )
     )
+    stack.callback(
+        async_dispatcher_connect(
+            hass,
+            signal=SIGNAL_KNX_DATA_SECURE_ISSUE_TELEGRAM,
+            target=forward_telegram,
+        )
+    )
+
+    connection.subscriptions[msg["id"]] = stack.close
     connection.send_result(msg["id"])

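The ExitStack change bundles both dispatcher unsubscribe callbacks behind a single callable, which is what connection.subscriptions expects. A pure-Python sketch of that pattern:

```python
# Pure-Python sketch of the pattern: several unsubscribe callbacks are bundled
# so a single `stack.close` tears all of them down (LIFO order).
from contextlib import ExitStack

log: list[str] = []
stack = ExitStack()
stack.callback(lambda: log.append("unsubscribed from knx_telegram"))
stack.callback(lambda: log.append("unsubscribed from knx_data_secure_issue_telegram"))

# In the hunk above, `connection.subscriptions[msg["id"]] = stack.close` means
# the websocket layer only has to invoke one callable when the client unsubscribes.
stack.close()
assert log == [
    "unsubscribed from knx_data_secure_issue_telegram",
    "unsubscribed from knx_telegram",
]
```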
@@ -7,10 +7,11 @@ in the Home Assistant Labs UI for users to enable or disable.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import logging
|
||||
|
||||
from homeassistant.const import EVENT_LABS_UPDATED
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.generated.labs import LABS_PREVIEW_FEATURES
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.storage import Store
|
||||
@@ -18,7 +19,6 @@ from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import async_get_custom_components
|
||||
|
||||
from .const import DOMAIN, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
|
||||
from .helpers import async_is_preview_feature_enabled, async_listen
|
||||
from .models import (
|
||||
EventLabsUpdatedData,
|
||||
LabPreviewFeature,
|
||||
@@ -135,3 +135,55 @@ async def _async_scan_all_preview_features(
|
||||
|
||||
_LOGGER.debug("Loaded %d total lab preview features", len(preview_features))
|
||||
return preview_features
|
||||
|
||||
|
||||
@callback
|
||||
def async_is_preview_feature_enabled(
|
||||
hass: HomeAssistant, domain: str, preview_feature: str
|
||||
) -> bool:
|
||||
"""Check if a lab preview feature is enabled.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
|
||||
Returns:
|
||||
True if the preview feature is enabled, False otherwise
|
||||
"""
|
||||
if LABS_DATA not in hass.data:
|
||||
return False
|
||||
|
||||
labs_data = hass.data[LABS_DATA]
|
||||
return (domain, preview_feature) in labs_data.data.preview_feature_status
|
||||
|
||||
|
||||
@callback
|
||||
def async_listen(
|
||||
hass: HomeAssistant,
|
||||
domain: str,
|
||||
preview_feature: str,
|
||||
listener: Callable[[], None],
|
||||
) -> Callable[[], None]:
|
||||
"""Listen for changes to a specific preview feature.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
listener: Callback to invoke when the preview feature is toggled
|
||||
|
||||
Returns:
|
||||
Callable to unsubscribe from the listener
|
||||
"""
|
||||
|
||||
@callback
|
||||
def _async_feature_updated(event: Event[EventLabsUpdatedData]) -> None:
|
||||
"""Handle labs feature update event."""
|
||||
if (
|
||||
event.data["domain"] == domain
|
||||
and event.data["preview_feature"] == preview_feature
|
||||
):
|
||||
listener()
|
||||
|
||||
return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)
|
||||
|
||||
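With the helpers now defined in labs/__init__.py, callers import them from homeassistant.components.labs directly. A hedged usage sketch; my_integration, fancy_mode, and watch_fancy_mode are made-up names for illustration:

```python
# Hedged usage sketch: "my_integration" and "fancy_mode" are invented names.
from collections.abc import Callable

from homeassistant.components.labs import (
    async_is_preview_feature_enabled,
    async_listen,
)
from homeassistant.core import HomeAssistant, callback


def watch_fancy_mode(hass: HomeAssistant) -> Callable[[], None]:
    """Re-check the preview feature whenever it is toggled in the Labs UI."""

    @callback
    def _feature_toggled() -> None:
        enabled = async_is_preview_feature_enabled(hass, "my_integration", "fancy_mode")
        # react to the toggle, e.g. set up or tear down the preview behavior
        print(f"fancy_mode is now {'enabled' if enabled else 'disabled'}")

    # the returned callable unsubscribes, e.g. pass it to entry.async_on_unload
    return async_listen(hass, "my_integration", "fancy_mode", _feature_toggled)
```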
@@ -1,63 +0,0 @@
|
||||
"""Helper functions for the Home Assistant Labs integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
|
||||
from homeassistant.const import EVENT_LABS_UPDATED
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
|
||||
from .const import LABS_DATA
|
||||
from .models import EventLabsUpdatedData
|
||||
|
||||
|
||||
@callback
|
||||
def async_is_preview_feature_enabled(
|
||||
hass: HomeAssistant, domain: str, preview_feature: str
|
||||
) -> bool:
|
||||
"""Check if a lab preview feature is enabled.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
|
||||
Returns:
|
||||
True if the preview feature is enabled, False otherwise
|
||||
"""
|
||||
if LABS_DATA not in hass.data:
|
||||
return False
|
||||
|
||||
labs_data = hass.data[LABS_DATA]
|
||||
return (domain, preview_feature) in labs_data.data.preview_feature_status
|
||||
|
||||
|
||||
@callback
|
||||
def async_listen(
|
||||
hass: HomeAssistant,
|
||||
domain: str,
|
||||
preview_feature: str,
|
||||
listener: Callable[[], None],
|
||||
) -> Callable[[], None]:
|
||||
"""Listen for changes to a specific preview feature.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
listener: Callback to invoke when the preview feature is toggled
|
||||
|
||||
Returns:
|
||||
Callable to unsubscribe from the listener
|
||||
"""
|
||||
|
||||
@callback
|
||||
def _async_feature_updated(event: Event[EventLabsUpdatedData]) -> None:
|
||||
"""Handle labs feature update event."""
|
||||
if (
|
||||
event.data["domain"] == domain
|
||||
and event.data["preview_feature"] == preview_feature
|
||||
):
|
||||
listener()
|
||||
|
||||
return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)
|
||||
@@ -12,7 +12,6 @@ from homeassistant.const import EVENT_LABS_UPDATED
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .const import LABS_DATA
|
||||
from .helpers import async_is_preview_feature_enabled, async_listen
|
||||
from .models import EventLabsUpdatedData
|
||||
|
||||
|
||||
@@ -21,7 +20,6 @@ def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Set up the number websocket API."""
|
||||
websocket_api.async_register_command(hass, websocket_list_preview_features)
|
||||
websocket_api.async_register_command(hass, websocket_update_preview_feature)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_feature)
|
||||
|
||||
|
||||
@callback
|
||||
@@ -110,52 +108,3 @@ async def websocket_update_preview_feature(
|
||||
hass.bus.async_fire(EVENT_LABS_UPDATED, event_data)
|
||||
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
|
||||
@callback
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "labs/subscribe",
|
||||
vol.Required("domain"): str,
|
||||
vol.Required("preview_feature"): str,
|
||||
}
|
||||
)
|
||||
def websocket_subscribe_feature(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Subscribe to a specific lab preview feature updates."""
|
||||
domain = msg["domain"]
|
||||
preview_feature_key = msg["preview_feature"]
|
||||
labs_data = hass.data[LABS_DATA]
|
||||
|
||||
preview_feature_id = f"{domain}.{preview_feature_key}"
|
||||
|
||||
if preview_feature_id not in labs_data.preview_features:
|
||||
connection.send_error(
|
||||
msg["id"],
|
||||
websocket_api.ERR_NOT_FOUND,
|
||||
f"Preview feature {preview_feature_id} not found",
|
||||
)
|
||||
return
|
||||
|
||||
preview_feature = labs_data.preview_features[preview_feature_id]
|
||||
|
||||
@callback
|
||||
def send_event() -> None:
|
||||
"""Send feature state to client."""
|
||||
enabled = async_is_preview_feature_enabled(hass, domain, preview_feature_key)
|
||||
connection.send_message(
|
||||
websocket_api.event_message(
|
||||
msg["id"],
|
||||
preview_feature.to_dict(enabled=enabled),
|
||||
)
|
||||
)
|
||||
|
||||
connection.subscriptions[msg["id"]] = async_listen(
|
||||
hass, domain, preview_feature_key, send_event
|
||||
)
|
||||
|
||||
connection.send_result(msg["id"])
|
||||
send_event()
|
||||
|
||||
@@ -35,6 +35,7 @@ from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
|
||||
from .const import CONF_INSTALLATION_KEY, CONF_USE_BLUETOOTH, DOMAIN
|
||||
from .coordinator import (
|
||||
LaMarzoccoBluetoothUpdateCoordinator,
|
||||
LaMarzoccoConfigEntry,
|
||||
LaMarzoccoConfigUpdateCoordinator,
|
||||
LaMarzoccoRuntimeData,
|
||||
@@ -72,38 +73,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
|
||||
client=create_client_session(hass),
|
||||
)
|
||||
|
||||
try:
|
||||
settings = await cloud_client.get_thing_settings(serial)
|
||||
except AuthFail as ex:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN, translation_key="authentication_failed"
|
||||
) from ex
|
||||
except (RequestNotSuccessful, TimeoutError) as ex:
|
||||
_LOGGER.debug(ex, exc_info=True)
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN, translation_key="api_error"
|
||||
) from ex
|
||||
|
||||
gateway_version = version.parse(
|
||||
settings.firmwares[FirmwareType.GATEWAY].build_version
|
||||
)
|
||||
|
||||
if gateway_version < version.parse("v5.0.9"):
|
||||
# incompatible gateway firmware, create an issue
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"unsupported_gateway_firmware",
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.ERROR,
|
||||
translation_key="unsupported_gateway_firmware",
|
||||
translation_placeholders={"gateway_version": str(gateway_version)},
|
||||
)
|
||||
|
||||
# initialize Bluetooth
|
||||
bluetooth_client: LaMarzoccoBluetoothClient | None = None
|
||||
if entry.options.get(CONF_USE_BLUETOOTH, True) and (
|
||||
token := (entry.data.get(CONF_TOKEN) or settings.ble_auth_token)
|
||||
token := entry.data.get(CONF_TOKEN)
|
||||
):
|
||||
if CONF_MAC not in entry.data:
|
||||
for discovery_info in async_discovered_service_info(hass):
|
||||
@@ -145,6 +118,44 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
|
||||
_LOGGER.info(
|
||||
"Bluetooth device not found during lamarzocco setup, continuing with cloud only"
|
||||
)
|
||||
try:
|
||||
settings = await cloud_client.get_thing_settings(serial)
|
||||
except AuthFail as ex:
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN, translation_key="authentication_failed"
|
||||
) from ex
|
||||
except (RequestNotSuccessful, TimeoutError) as ex:
|
||||
_LOGGER.debug(ex, exc_info=True)
|
||||
if not bluetooth_client:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN, translation_key="api_error"
|
||||
) from ex
|
||||
_LOGGER.debug("Cloud failed, continuing with Bluetooth only", exc_info=True)
|
||||
else:
|
||||
gateway_version = version.parse(
|
||||
settings.firmwares[FirmwareType.GATEWAY].build_version
|
||||
)
|
||||
|
||||
if gateway_version < version.parse("v5.0.9"):
|
||||
# incompatible gateway firmware, create an issue
|
||||
ir.async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"unsupported_gateway_firmware",
|
||||
is_fixable=False,
|
||||
severity=ir.IssueSeverity.ERROR,
|
||||
translation_key="unsupported_gateway_firmware",
|
||||
translation_placeholders={"gateway_version": str(gateway_version)},
|
||||
)
|
||||
# Update BLE Token if exists
|
||||
if settings.ble_auth_token:
|
||||
hass.config_entries.async_update_entry(
|
||||
entry,
|
||||
data={
|
||||
**entry.data,
|
||||
CONF_TOKEN: settings.ble_auth_token,
|
||||
},
|
||||
)
|
||||
|
||||
device = LaMarzoccoMachine(
|
||||
serial_number=entry.unique_id,
|
||||
@@ -153,7 +164,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
|
||||
)
|
||||
|
||||
coordinators = LaMarzoccoRuntimeData(
|
||||
LaMarzoccoConfigUpdateCoordinator(hass, entry, device, cloud_client),
|
||||
LaMarzoccoConfigUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoSettingsUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoScheduleUpdateCoordinator(hass, entry, device),
|
||||
LaMarzoccoStatisticsUpdateCoordinator(hass, entry, device),
|
||||
@@ -166,6 +177,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) -
|
||||
coordinators.statistics_coordinator.async_config_entry_first_refresh(),
|
||||
)
|
||||
|
||||
# bt coordinator only if bluetooth client is available
|
||||
# and after the initial refresh of the config coordinator
|
||||
# to fetch only if the others failed
|
||||
if bluetooth_client:
|
||||
bluetooth_coordinator = LaMarzoccoBluetoothUpdateCoordinator(
|
||||
hass, entry, device
|
||||
)
|
||||
await bluetooth_coordinator.async_config_entry_first_refresh()
|
||||
coordinators.bluetooth_coordinator = bluetooth_coordinator
|
||||
|
||||
entry.runtime_data = coordinators
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||