Mirror of https://github.com/home-assistant/core.git (synced 2025-12-06 07:58:08 +00:00)

Compare commits: labs_helpe ... negative_r (183 commits)
Commits in this comparison (183, abbreviated SHA1):

9fc2b6fe43, 5b056a83d4, 02a70123c1, 5f6d2f537a, 5e04e9f04d, 56515ad7b5, a1fe2bf4fa, b8fa8efd91,
03557b5ef2, dafec8ce58, 6ff3f74347, ddd8cf7fde, 1356eea52f, 6188e0e39b, 699fa1617d, 449f0fa5a5,
2e008d2bb7, 05dec2619d, 25a6778ba8, f564b8cb44, ce6bfdebfc, f00a944ac1, 3073a99ce6, 8b04ce1328,
39f76787ab, e8acced335, 758a30eebc, faf94bea24, ff91c57228, 3d2b506997, d3c1c28605, d4e1f7741d,
e713632eed, 060ad35ddc, 6c5dba40cd, a04d595424, fe85eaf2a2, 3551c4b01f, e7edd51a65, 0c4f2326ef,
81f4456d7c, 2b608bf15c, 972ed4b27f, 23c167da1b, 34d6938171, 4bb8590076, 5e0923b60d, ad48f3c634,
2bdd6854eb, 0bf906911c, 874d6f5613, 43ba10eebd, 64bed19805, 6357067f0f, e328ba4045, 332dbddce6,
82d935a819, 4b84998c0c, e10c1ebcf6, 0174bad182, d5be623684, d006b044c8, fdd9571623, 4f4c5152b9,
b031a082cd, a1132195fd, 708b3dc8b2, 8ae0216135, 1472281cd5, ceaa71d198, 7f0d0c555a, 3b94b2491a,
8c8708d5bc, ca35102138, 1a1b50ef1a, 5a4d51e57a, 9e1bc637e2, ab879c07ca, 488c97531e, 3b52c5df79,
7f4b56104d, ab8135ba1a, a88599bc09, 45034279c8, 9f3dae6254, ef36d7b1e5, e5346ba017, 68d41d2a48,
00a882c20a, 44a6772947, f874ba1355, 4fc125c49a, 8c59196e19, 326f7f0559, 11afda8c22, f1ee0e4ac9,
5f522e5afa, 4f6624d0aa, 70990645a7, 2f7d74ff62, 885667832b, 4646929987, 010aea952c, 563678dc47,
a48f01f213, 08b758b0d2, 4306fbea52, 6f4c479f8f, 1d9c06264e, d045ecaf13, f7c41e694c, 9ee7ed5cdb,
83c4e2abc9, a7dbf551a3, 0b2bb9f6bf, 0769163b67, 2bb51e1146, d2248d282c, 8fe79a88ca, 7a328539b2,
ec69efee4d, dbcde549d4, 988355e138, 7711eac607, 32fe53cceb, 3a65d3c0dc, 7fe26223ac, 7e8496afb2,
2ec5190243, a706db8fdb, a00923c48b, 7480d59f0f, 4c8d9ed401, eef10c59db, a1a1f8dd77, c75a5c5151,
cdaaa2bd8f, bd84dac8fb, 42cbeca5b0, ad0a498d10, 973405822b, b883d2f519, 4654d6de87, 990c8cd4e6,
f8c76f42e3, 21d914c8ca, ec77add1a6, ef3b7dfd1d, 51241d963d, 7c48e6e046, 38d8da4279, 3396a72fa8,
2d26ab390e, 1bf5bc9323, 87ea96a3e0, e3cf65510b, f69fce68d6, f758cfa82f, 9c7a928b29, 405a9948a2,
0e3bab3ce4, 4900d25ac8, ea10cdb4b0, 6baf77d256, 13bc0ebed8, 611af9c832, c2b7a63dd9, 550716a753,
56a71e6798, 80ec51c56b, ea651c4a22, ff40ce419e, d95308719c, f4fb95ee43, 14d95cc86b, 4257435975,
a6aab088fb, 655a63c104, a2ade413c2, 10299b2ef4, 26444d8d34, 554c122a37, 1c0dd02a7c

.github/workflows/builder.yml (vendored, 158 lines changed)

@@ -14,7 +14,9 @@ env:
   PIP_TIMEOUT: 60
   UV_HTTP_TIMEOUT: 60
   UV_SYSTEM_PYTHON: "true"
-  BASE_IMAGE_VERSION: "2025.11.0"
+  # Base image version from https://github.com/home-assistant/docker
+  BASE_IMAGE_VERSION: "2025.11.3"
+  ARCHITECTURES: '["amd64", "aarch64"]'

 jobs:
   init:
@@ -25,6 +27,7 @@ jobs:
       version: ${{ steps.version.outputs.version }}
       channel: ${{ steps.version.outputs.channel }}
       publish: ${{ steps.version.outputs.publish }}
+      architectures: ${{ env.ARCHITECTURES }}
     steps:
       - name: Checkout the repository
         uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
@@ -85,7 +88,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        arch: ["amd64", "aarch64"]
+        arch: ${{ fromJson(needs.init.outputs.architectures) }}
         include:
           - arch: amd64
             os: ubuntu-latest
@@ -187,7 +190,8 @@ jobs:
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

-      - name: Install Cosign
+      - &install_cosign
+        name: Install Cosign
         uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
         with:
           cosign-release: "v2.5.3"
@@ -291,7 +295,7 @@ jobs:

       # home-assistant/builder doesn't support sha pinning
       - name: Build base image
-        uses: home-assistant/builder@2025.09.0
+        uses: home-assistant/builder@2025.11.0
         with:
           args: |
             $BUILD_ARGS \
@@ -350,13 +354,7 @@ jobs:
       matrix:
         registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
     steps:
       - name: Checkout the repository
         uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

-      - name: Install Cosign
-        uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
-        with:
-          cosign-release: "v2.2.3"
+      - *install_cosign

       - name: Login to DockerHub
         if: matrix.registry == 'docker.io/homeassistant'
@@ -366,88 +364,94 @@ jobs:
           password: ${{ secrets.DOCKERHUB_TOKEN }}

       - name: Login to GitHub Container Registry
         if: matrix.registry == 'ghcr.io/home-assistant'
         uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
         with:
           registry: ghcr.io
           username: ${{ github.repository_owner }}
           password: ${{ secrets.GITHUB_TOKEN }}

-      - name: Build Meta Image
+      - name: Verify architecture image signatures
         shell: bash
         run: |
-          export DOCKER_CLI_EXPERIMENTAL=enabled
-
-          function create_manifest() {
-            local tag_l=${1}
-            local tag_r=${2}
-            local registry=${{ matrix.registry }}
-
-            docker manifest create "${registry}/home-assistant:${tag_l}" \
-              "${registry}/amd64-homeassistant:${tag_r}" \
-              "${registry}/aarch64-homeassistant:${tag_r}"
-
-            docker manifest annotate "${registry}/home-assistant:${tag_l}" \
-              "${registry}/amd64-homeassistant:${tag_r}" \
-              --os linux --arch amd64
-
-            docker manifest annotate "${registry}/home-assistant:${tag_l}" \
-              "${registry}/aarch64-homeassistant:${tag_r}" \
-              --os linux --arch arm64 --variant=v8
-
-            docker manifest push --purge "${registry}/home-assistant:${tag_l}"
-            cosign sign --yes "${registry}/home-assistant:${tag_l}"
-          }
-
-          function validate_image() {
-            local image=${1}
-            if ! cosign verify --certificate-oidc-issuer https://token.actions.githubusercontent.com --certificate-identity-regexp https://github.com/home-assistant/core/.* "${image}"; then
-              echo "Invalid signature!"
-              exit 1
-            fi
-          }
-
-          function push_dockerhub() {
-            local image=${1}
-            local tag=${2}
-
-            docker tag "ghcr.io/home-assistant/${image}:${tag}" "docker.io/homeassistant/${image}:${tag}"
-            docker push "docker.io/homeassistant/${image}:${tag}"
-            cosign sign --yes "docker.io/homeassistant/${image}:${tag}"
-          }
-
-          # Pull images from github container registry and verify signature
-          docker pull "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
-          docker pull "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
-
-          validate_image "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
-          validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
-
-          if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
-            # Upload images to dockerhub
-            push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
-            push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
-          fi
-
-          # Create version tag
-          create_manifest "${{ needs.init.outputs.version }}" "${{ needs.init.outputs.version }}"
-
-          # Create general tags
-          if [[ "${{ needs.init.outputs.version }}" =~ d ]]; then
-            create_manifest "dev" "${{ needs.init.outputs.version }}"
-          elif [[ "${{ needs.init.outputs.version }}" =~ b ]]; then
-            create_manifest "beta" "${{ needs.init.outputs.version }}"
-            create_manifest "rc" "${{ needs.init.outputs.version }}"
-          else
-            create_manifest "stable" "${{ needs.init.outputs.version }}"
-            create_manifest "latest" "${{ needs.init.outputs.version }}"
-            create_manifest "beta" "${{ needs.init.outputs.version }}"
-            create_manifest "rc" "${{ needs.init.outputs.version }}"
-
-            # Create series version tag (e.g. 2021.6)
-            v="${{ needs.init.outputs.version }}"
-            create_manifest "${v%.*}" "${{ needs.init.outputs.version }}"
-          fi
+          ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
+          for arch in $ARCHS; do
+            echo "Verifying ${arch} image signature..."
+            cosign verify \
+              --certificate-oidc-issuer https://token.actions.githubusercontent.com \
+              --certificate-identity-regexp https://github.com/home-assistant/core/.* \
+              "ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
+          done
+          echo "✓ All images verified successfully"
+
+      # Generate all Docker tags based on version string
+      # Version format: YYYY.MM.PATCH, YYYY.MM.PATCHbN (beta), or YYYY.MM.PATCH.devYYYYMMDDHHMM (dev)
+      # Examples:
+      #   2025.12.1 (stable) -> tags: 2025.12.1, 2025.12, stable, latest, beta, rc
+      #   2025.12.0b3 (beta) -> tags: 2025.12.0b3, beta, rc
+      #   2025.12.0.dev202511250240 -> tags: 2025.12.0.dev202511250240, dev
+      - name: Generate Docker metadata
+        id: meta
+        uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
+        with:
+          images: ${{ matrix.registry }}/home-assistant
+          sep-tags: ","
+          tags: |
+            type=raw,value=${{ needs.init.outputs.version }},priority=9999
+            type=raw,value=dev,enable=${{ contains(needs.init.outputs.version, 'd') }}
+            type=raw,value=beta,enable=${{ !contains(needs.init.outputs.version, 'd') }}
+            type=raw,value=rc,enable=${{ !contains(needs.init.outputs.version, 'd') }}
+            type=raw,value=stable,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
+            type=raw,value=latest,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
+            type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.7.1
+
+      - name: Copy architecture images to DockerHub
+        if: matrix.registry == 'docker.io/homeassistant'
+        shell: bash
+        run: |
+          # Use imagetools to copy image blobs directly between registries
+          # This preserves provenance/attestations and seems to be much faster than pull/push
+          ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
+          for arch in $ARCHS; do
+            echo "Copying ${arch} image to DockerHub..."
+            docker buildx imagetools create \
+              --tag "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}" \
+              "ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
+            cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
+          done
+
+      - name: Create and push multi-arch manifests
+        shell: bash
+        run: |
+          # Build list of architecture images dynamically
+          ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
+          ARCH_IMAGES=()
+          for arch in $ARCHS; do
+            ARCH_IMAGES+=("${{ matrix.registry }}/${arch}-homeassistant:${{ needs.init.outputs.version }}")
+          done
+
+          # Build list of all tags for single manifest creation
+          # Note: Using sep-tags=',' in metadata-action for easier parsing
+          TAG_ARGS=()
+          IFS=',' read -ra TAGS <<< "${{ steps.meta.outputs.tags }}"
+          for tag in "${TAGS[@]}"; do
+            TAG_ARGS+=("--tag" "${tag}")
+          done
+
+          # Create manifest with ALL tags in a single operation (much faster!)
+          echo "Creating multi-arch manifest with tags: ${TAGS[*]}"
+          docker buildx imagetools create "${TAG_ARGS[@]}" "${ARCH_IMAGES[@]}"
+
+          # Sign each tag separately (signing requires individual tag names)
+          echo "Signing all tags..."
+          for tag in "${TAGS[@]}"; do
+            echo "Signing ${tag}"
+            cosign sign --yes "${tag}"
+          done
+
+          echo "All manifests created and signed successfully"

   build_python:
     name: Build PyPi package
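
The tag rules passed to docker/metadata-action above encode the release-channel logic in the `contains(version, 'd')` and `contains(version, 'b')` expressions. As a rough illustration only (the function name and layout are invented for this sketch, it is not part of the workflow), the same classification can be written in plain Python:

```python
# Illustrative sketch: mirrors the tag rules and examples in the workflow comments.
# docker_tags() is a hypothetical helper, not Home Assistant or workflow code.
def docker_tags(version: str) -> list[str]:
    """Return the Docker tags implied by a Home Assistant version string."""
    tags = [version]
    if "d" in version:  # e.g. 2025.12.0.dev202511250240
        tags.append("dev")
    elif "b" in version:  # e.g. 2025.12.0b3
        tags += ["beta", "rc"]
    else:  # e.g. 2025.12.1 (stable release)
        major_minor = ".".join(version.split(".")[:2])
        tags += ["stable", "latest", "beta", "rc", major_minor]
    return tags


if __name__ == "__main__":
    assert docker_tags("2025.12.0.dev202511250240") == [
        "2025.12.0.dev202511250240", "dev"
    ]
    assert docker_tags("2025.12.0b3") == ["2025.12.0b3", "beta", "rc"]
    assert docker_tags("2025.12.1") == [
        "2025.12.1", "stable", "latest", "beta", "rc", "2025.12"
    ]
```
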

.github/workflows/ci.yaml (vendored, 2 lines changed)

@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 2
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 1
-  HA_SHORT_VERSION: "2025.12"
+  HA_SHORT_VERSION: "2026.1"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
   # 10.3 is the oldest supported version

@@ -231,7 +231,7 @@ jobs:
       - name: Detect duplicates using AI
         id: ai_detection
         if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
-        uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
+        uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
        with:
          model: openai/gpt-4o
          system-prompt: |

@@ -57,7 +57,7 @@ jobs:
       - name: Detect language using AI
         id: ai_language_detection
         if: steps.detect_language.outputs.should_continue == 'true'
-        uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
+        uses: actions/ai-inference@334892bb203895caaed82ec52d23c1ed9385151e # v2.0.4
         with:
           model: openai/gpt-4o-mini
           system-prompt: |

@@ -187,6 +187,7 @@ homeassistant.components.elkm1.*
 homeassistant.components.emulated_hue.*
 homeassistant.components.energenie_power_sockets.*
 homeassistant.components.energy.*
+homeassistant.components.energyid.*
 homeassistant.components.energyzero.*
 homeassistant.components.enigma2.*
 homeassistant.components.enphase_envoy.*

CODEOWNERS (generated, 4 lines changed)

@@ -452,6 +452,8 @@ build.json @home-assistant/supervisor
 /tests/components/energenie_power_sockets/ @gnumpi
 /homeassistant/components/energy/ @home-assistant/core
 /tests/components/energy/ @home-assistant/core
+/homeassistant/components/energyid/ @JrtPec @Molier
+/tests/components/energyid/ @JrtPec @Molier
 /homeassistant/components/energyzero/ @klaasnicolaas
 /tests/components/energyzero/ @klaasnicolaas
 /homeassistant/components/enigma2/ @autinerd
@@ -537,6 +539,8 @@ build.json @home-assistant/supervisor
 /tests/components/freebox/ @hacf-fr @Quentame
 /homeassistant/components/freedompro/ @stefano055415
 /tests/components/freedompro/ @stefano055415
+/homeassistant/components/fressnapf_tracker/ @eifinger
+/tests/components/fressnapf_tracker/ @eifinger
 /homeassistant/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
 /tests/components/fritz/ @AaronDavidSchneider @chemelli74 @mib1185
 /homeassistant/components/fritzbox/ @mib1185 @flabbamann

@@ -35,25 +35,22 @@ COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv

 USER vscode

-COPY .python-version ./
-RUN uv python install
-
 ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
-RUN uv venv $VIRTUAL_ENV
+RUN --mount=type=bind,source=.python-version,target=.python-version \
+    uv python install \
+    && uv venv $VIRTUAL_ENV
 ENV PATH="$VIRTUAL_ENV/bin:$PATH"

 WORKDIR /tmp

 # Setup hass-release
 RUN git clone --depth 1 https://github.com/home-assistant/hass-release ~/hass-release \
     && uv pip install -e ~/hass-release/

 # Install Python dependencies from requirements
-COPY requirements.txt ./
-COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
-RUN uv pip install -r requirements.txt
-COPY requirements_test.txt requirements_test_pre_commit.txt ./
-RUN uv pip install -r requirements_test.txt
+RUN --mount=type=bind,source=requirements.txt,target=requirements.txt \
+    --mount=type=bind,source=homeassistant/package_constraints.txt,target=homeassistant/package_constraints.txt \
+    --mount=type=bind,source=requirements_test.txt,target=requirements_test.txt \
+    --mount=type=bind,source=requirements_test_pre_commit.txt,target=requirements_test_pre_commit.txt \
+    uv pip install -r requirements.txt -r requirements_test.txt

 WORKDIR /workspaces

@@ -1000,7 +1000,7 @@ class _WatchPendingSetups:
             # We log every LOG_SLOW_STARTUP_INTERVAL until all integrations are done
             # once we take over LOG_SLOW_STARTUP_INTERVAL (60s) to start up
             _LOGGER.warning(
-                "Waiting on integrations to complete setup: %s",
+                "Waiting for integrations to complete setup: %s",
                 self._setup_started,
             )

@@ -2,6 +2,7 @@

 from __future__ import annotations

+from collections.abc import Mapping
 from dataclasses import dataclass
 import logging
 from typing import Any
@@ -174,6 +175,56 @@ class AirobotConfigFlow(BaseConfigFlow, domain=DOMAIN):
             step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
         )

+    async def async_step_reauth(
+        self, entry_data: Mapping[str, Any]
+    ) -> ConfigFlowResult:
+        """Handle reauthentication upon an API authentication error."""
+        return await self.async_step_reauth_confirm()
+
+    async def async_step_reauth_confirm(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
+        """Confirm reauthentication dialog."""
+        errors: dict[str, str] = {}
+        reauth_entry = self._get_reauth_entry()
+
+        if user_input is not None:
+            # Combine existing data with new password
+            data = {
+                CONF_HOST: reauth_entry.data[CONF_HOST],
+                CONF_USERNAME: reauth_entry.data[CONF_USERNAME],
+                CONF_PASSWORD: user_input[CONF_PASSWORD],
+            }
+
+            try:
+                await validate_input(self.hass, data)
+            except CannotConnect:
+                errors["base"] = "cannot_connect"
+            except InvalidAuth:
+                errors["base"] = "invalid_auth"
+            except Exception:
+                _LOGGER.exception("Unexpected exception")
+                errors["base"] = "unknown"
+            else:
+                return self.async_update_reload_and_abort(
+                    reauth_entry,
+                    data_updates={CONF_PASSWORD: user_input[CONF_PASSWORD]},
+                )
+
+        return self.async_show_form(
+            step_id="reauth_confirm",
+            data_schema=vol.Schema(
+                {
+                    vol.Required(CONF_PASSWORD): str,
+                }
+            ),
+            description_placeholders={
+                "username": reauth_entry.data[CONF_USERNAME],
+                "host": reauth_entry.data[CONF_HOST],
+            },
+            errors=errors,
+        )
+

 class CannotConnect(HomeAssistantError):
     """Error to indicate we cannot connect."""

@@ -11,6 +11,7 @@ from pyairobotrest.exceptions import AirobotAuthError, AirobotConnectionError
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME
 from homeassistant.core import HomeAssistant
+from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

@@ -53,7 +54,15 @@ class AirobotDataUpdateCoordinator(DataUpdateCoordinator[AirobotData]):
         try:
             status = await self.client.get_statuses()
             settings = await self.client.get_settings()
-        except (AirobotAuthError, AirobotConnectionError) as err:
-            raise UpdateFailed(f"Failed to communicate with device: {err}") from err
+        except AirobotAuthError as err:
+            raise ConfigEntryAuthFailed(
+                translation_domain=DOMAIN,
+                translation_key="authentication_failed",
+            ) from err
+        except AirobotConnectionError as err:
+            raise UpdateFailed(
+                translation_domain=DOMAIN,
+                translation_key="connection_failed",
+            ) from err

         return AirobotData(status=status, settings=settings)
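
For context on the coordinator change above: in Home Assistant, raising ConfigEntryAuthFailed from a coordinator update kicks off the config entry's reauth flow, while UpdateFailed only marks the entities unavailable until the next successful refresh. A minimal, generic sketch of that pattern, with placeholder client names (this is not the Airobot code itself):

```python
# Generic sketch of the coordinator error-mapping pattern; MyAuthError,
# MyConnectionError and client.fetch() are placeholders, not a real library API.
import logging
from datetime import timedelta

from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed


class MyAuthError(Exception):
    """Placeholder for a client library authentication error."""


class MyConnectionError(Exception):
    """Placeholder for a client library connection error."""


class MyCoordinator(DataUpdateCoordinator[dict]):
    """Poll a device and translate client errors into Home Assistant semantics."""

    def __init__(self, hass: HomeAssistant, client) -> None:
        super().__init__(
            hass,
            logging.getLogger(__name__),
            name="my_device",
            update_interval=timedelta(seconds=30),
        )
        self.client = client

    async def _async_update_data(self) -> dict:
        try:
            return await self.client.fetch()  # placeholder client call
        except MyAuthError as err:
            # Starts the config entry reauth flow.
            raise ConfigEntryAuthFailed("Credentials rejected") from err
        except MyConnectionError as err:
            # Marks this coordinator's entities unavailable until the next success.
            raise UpdateFailed(f"Cannot reach device: {err}") from err
```
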

@@ -12,6 +12,6 @@
   "integration_type": "device",
   "iot_class": "local_polling",
   "loggers": ["pyairobotrest"],
-  "quality_scale": "bronze",
+  "quality_scale": "silver",
   "requirements": ["pyairobotrest==0.1.0"]
 }

@@ -34,7 +34,7 @@ rules:
   integration-owner: done
   log-when-unavailable: done
   parallel-updates: done
-  reauthentication-flow: todo
+  reauthentication-flow: done
   test-coverage: done

   # Gold

@@ -1,7 +1,8 @@
 {
   "config": {
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
     },
     "error": {
       "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
@@ -14,15 +15,24 @@
         "password": "[%key:common::config_flow::data::password%]"
       },
       "data_description": {
-        "password": "The thermostat password."
+        "password": "[%key:component::airobot::config::step::user::data_description::password%]"
       },
       "description": "Airobot thermostat {device_id} discovered at {host}. Enter the password to complete setup. Find the password in the thermostat settings menu under Connectivity → Mobile app."
     },
+    "reauth_confirm": {
+      "data": {
+        "password": "[%key:common::config_flow::data::password%]"
+      },
+      "data_description": {
+        "password": "[%key:component::airobot::config::step::user::data_description::password%]"
+      },
+      "description": "The authentication for Airobot thermostat at {host} (Device ID: {username}) has expired. Please enter the password to reauthenticate. Find the password in the thermostat settings menu under Connectivity → Mobile app."
+    },
     "user": {
       "data": {
         "host": "[%key:common::config_flow::data::host%]",
         "password": "[%key:common::config_flow::data::password%]",
-        "username": "[%key:common::config_flow::data::username%]"
+        "username": "Device ID"
       },
       "data_description": {
         "host": "The hostname or IP address of your Airobot thermostat.",
@@ -34,6 +44,12 @@
     }
   },
   "exceptions": {
+    "authentication_failed": {
+      "message": "Authentication failed, please reauthenticate."
+    },
+    "connection_failed": {
+      "message": "Failed to communicate with device."
+    },
     "set_preset_mode_failed": {
       "message": "Failed to set preset mode to {preset_mode}."
     },

@@ -160,7 +160,6 @@
     "triggers": {
       "armed": {
         "description": "Triggers when an alarm is armed.",
-        "description_configured": "[%key:component::alarm_control_panel::triggers::armed::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
@@ -171,7 +170,6 @@
       },
       "armed_away": {
         "description": "Triggers when an alarm is armed away.",
-        "description_configured": "[%key:component::alarm_control_panel::triggers::armed_away::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
@@ -182,7 +180,6 @@
       },
       "armed_home": {
         "description": "Triggers when an alarm is armed home.",
-        "description_configured": "[%key:component::alarm_control_panel::triggers::armed_home::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
@@ -193,7 +190,6 @@
       },
       "armed_night": {
         "description": "Triggers when an alarm is armed night.",
-        "description_configured": "[%key:component::alarm_control_panel::triggers::armed_night::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
@@ -204,7 +200,6 @@
       },
       "armed_vacation": {
         "description": "Triggers when an alarm is armed vacation.",
-        "description_configured": "[%key:component::alarm_control_panel::triggers::armed_vacation::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
@@ -215,7 +210,6 @@
       },
       "disarmed": {
         "description": "Triggers when an alarm is disarmed.",
-        "description_configured": "[%key:component::alarm_control_panel::triggers::disarmed::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
@@ -226,7 +220,6 @@
       },
       "triggered": {
         "description": "Triggers when an alarm is triggered.",
-        "description_configured": "[%key:component::alarm_control_panel::triggers::triggered::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",

@@ -21,7 +21,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda

 from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN

-SCAN_INTERVAL = 30
+SCAN_INTERVAL = 300

 type AmazonConfigEntry = ConfigEntry[AmazonDevicesCoordinator]

@@ -45,7 +45,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
             config_entry=entry,
             update_interval=timedelta(seconds=SCAN_INTERVAL),
             request_refresh_debouncer=Debouncer(
-                hass, _LOGGER, cooldown=30, immediate=False
+                hass, _LOGGER, cooldown=SCAN_INTERVAL, immediate=False
             ),
         )
         self.api = AmazonEchoApi(

@@ -8,5 +8,5 @@
   "iot_class": "cloud_polling",
   "loggers": ["aioamazondevices"],
   "quality_scale": "platinum",
-  "requirements": ["aioamazondevices==9.0.3"]
+  "requirements": ["aioamazondevices==10.0.0"]
 }

@@ -2,6 +2,7 @@

 from __future__ import annotations

+from aiohttp import CookieJar
 from pyanglianwater import AnglianWater
 from pyanglianwater.auth import MSOB2CAuth
 from pyanglianwater.exceptions import (
@@ -18,7 +19,7 @@ from homeassistant.const import (
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
-from homeassistant.helpers.aiohttp_client import async_get_clientsession
+from homeassistant.helpers.aiohttp_client import async_create_clientsession

 from .const import CONF_ACCOUNT_NUMBER, DOMAIN
 from .coordinator import AnglianWaterConfigEntry, AnglianWaterUpdateCoordinator
@@ -33,7 +34,10 @@ async def async_setup_entry(
     auth = MSOB2CAuth(
         username=entry.data[CONF_USERNAME],
         password=entry.data[CONF_PASSWORD],
-        session=async_get_clientsession(hass),
+        session=async_create_clientsession(
+            hass,
+            cookie_jar=CookieJar(quote_cookie=False),
+        ),
         refresh_token=entry.data[CONF_ACCESS_TOKEN],
         account_number=entry.data[CONF_ACCOUNT_NUMBER],
     )

@@ -18,17 +18,21 @@ _LOGGER = logging.getLogger(__name__)
 class AnglianWaterEntity(CoordinatorEntity[AnglianWaterUpdateCoordinator]):
     """Defines a Anglian Water entity."""

     _attr_has_entity_name = True

     def __init__(
         self,
         coordinator: AnglianWaterUpdateCoordinator,
         smart_meter: SmartMeter,
+        key: str,
     ) -> None:
         """Initialize Anglian Water entity."""
         super().__init__(coordinator)
         self.smart_meter = smart_meter
+        self._attr_unique_id = f"{smart_meter.serial_number}_{key}"
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, smart_meter.serial_number)},
-            name="Smart Water Meter",
+            name=smart_meter.serial_number,
             manufacturer="Anglian Water",
             serial_number=smart_meter.serial_number,
         )

@@ -108,9 +108,8 @@ class AnglianWaterSensorEntity(AnglianWaterEntity, SensorEntity):
         description: AnglianWaterSensorEntityDescription,
     ) -> None:
         """Initialize Anglian Water sensor."""
-        super().__init__(coordinator, smart_meter)
+        super().__init__(coordinator, smart_meter, description.key)
         self.entity_description = description
-        self._attr_unique_id = f"{smart_meter.serial_number}_{description.key}"

     @property
     def native_value(self) -> float | None:

@@ -19,7 +19,7 @@
       "data_description": {
         "account_number": "Your account number found on your latest bill.",
         "password": "Your password",
-        "username": "Username or email used to login to the Anglian Water website."
+        "username": "Username or email used to log in to the Anglian Water website."
       },
       "description": "Enter your Anglian Water account credentials to connect to Home Assistant."
     }

@@ -17,7 +17,7 @@ from homeassistant.helpers import (
 )
 from homeassistant.helpers.typing import ConfigType

-from .const import CONF_CHAT_MODEL, DEFAULT, DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
+from .const import DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER

 PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -37,14 +37,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
         partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
     )
     try:
-        # Use model from first conversation subentry for validation
-        subentries = list(entry.subentries.values())
-        if subentries:
-            model_id = subentries[0].data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
-        else:
-            model_id = DEFAULT[CONF_CHAT_MODEL]
-        model = await client.models.retrieve(model_id=model_id, timeout=10.0)
-        LOGGER.debug("Anthropic model: %s", model.display_name)
+        await client.models.list(timeout=10.0)
     except anthropic.AuthenticationError as err:
         LOGGER.error("Invalid API key: %s", err)
         return False

@@ -421,6 +421,8 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
                 )
                 if short_form.search(model_alias):
                     model_alias += "-0"
+                if model_alias.endswith(("haiku", "opus", "sonnet")):
+                    model_alias += "-latest"
                 model_options.append(
                     SelectOptionDict(
                         label=model_info.display_name,
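
The config flow addition above appends "-latest" to aliases that end in a bare model family name, so that the option shown to the user resolves to a concrete alias. A standalone illustration of that one rule (the inputs are hypothetical examples, not taken from the Anthropic model listing the real flow iterates over):

```python
# Illustration of the endswith-based alias normalization added above.
def normalize_alias(model_alias: str) -> str:
    # Bare family names get a "-latest" suffix; already-versioned aliases pass through.
    if model_alias.endswith(("haiku", "opus", "sonnet")):
        model_alias += "-latest"
    return model_alias


assert normalize_alias("claude-3-5-haiku") == "claude-3-5-haiku-latest"
assert normalize_alias("claude-sonnet-4-5") == "claude-sonnet-4-5"  # unchanged
```
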

@@ -583,7 +583,7 @@ class AnthropicBaseLLMEntity(Entity):
             identifiers={(DOMAIN, subentry.subentry_id)},
             name=subentry.title,
             manufacturer="Anthropic",
-            model="Claude",
+            model=subentry.data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL]),
             entry_type=dr.DeviceEntryType.SERVICE,
         )

@@ -8,5 +8,5 @@
   "documentation": "https://www.home-assistant.io/integrations/anthropic",
   "integration_type": "service",
   "iot_class": "cloud_polling",
-  "requirements": ["anthropic==0.73.0"]
+  "requirements": ["anthropic==0.75.0"]
 }

@@ -5,6 +5,7 @@
   "config_flow": true,
   "dependencies": ["zeroconf"],
   "documentation": "https://www.home-assistant.io/integrations/apple_tv",
+  "integration_type": "device",
   "iot_class": "local_push",
   "loggers": ["pyatv", "srptools"],
   "requirements": ["pyatv==0.16.1;python_version<'3.14'"],

@@ -1123,63 +1123,6 @@ class PipelineRun:
             )

         try:
-            user_input = conversation.ConversationInput(
-                text=intent_input,
-                context=self.context,
-                conversation_id=conversation_id,
-                device_id=self._device_id,
-                satellite_id=self._satellite_id,
-                language=input_language,
-                agent_id=self.intent_agent.id,
-                extra_system_prompt=conversation_extra_system_prompt,
-            )
-
-            agent_id = self.intent_agent.id
-            processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
-            all_targets_in_satellite_area = False
-            intent_response: intent.IntentResponse | None = None
-            if not processed_locally and not self._intent_agent_only:
-                # Sentence triggers override conversation agent
-                if (
-                    trigger_response_text
-                    := await conversation.async_handle_sentence_triggers(
-                        self.hass, user_input
-                    )
-                ) is not None:
-                    # Sentence trigger matched
-                    agent_id = "sentence_trigger"
-                    processed_locally = True
-                    intent_response = intent.IntentResponse(
-                        self.pipeline.conversation_language
-                    )
-                    intent_response.async_set_speech(trigger_response_text)
-
-            intent_filter: Callable[[RecognizeResult], bool] | None = None
-            # If the LLM has API access, we filter out some sentences that are
-            # interfering with LLM operation.
-            if (
-                intent_agent_state := self.hass.states.get(self.intent_agent.id)
-            ) and intent_agent_state.attributes.get(
-                ATTR_SUPPORTED_FEATURES, 0
-            ) & conversation.ConversationEntityFeature.CONTROL:
-                intent_filter = _async_local_fallback_intent_filter
-
-            # Try local intents
-            if (
-                intent_response is None
-                and self.pipeline.prefer_local_intents
-                and (
-                    intent_response := await conversation.async_handle_intents(
-                        self.hass,
-                        user_input,
-                        intent_filter=intent_filter,
-                    )
-                )
-            ):
-                # Local intent matched
-                agent_id = conversation.HOME_ASSISTANT_AGENT
-                processed_locally = True
-
             if self.tts_stream and self.tts_stream.supports_streaming_input:
                 tts_input_stream: asyncio.Queue[str | None] | None = asyncio.Queue()
             else:
@@ -1265,6 +1208,17 @@ class PipelineRun:
                 assert self.tts_stream is not None
                 self.tts_stream.async_set_message_stream(tts_input_stream_generator())

+            user_input = conversation.ConversationInput(
+                text=intent_input,
+                context=self.context,
+                conversation_id=conversation_id,
+                device_id=self._device_id,
+                satellite_id=self._satellite_id,
+                language=input_language,
+                agent_id=self.intent_agent.id,
+                extra_system_prompt=conversation_extra_system_prompt,
+            )
+
             with (
                 chat_session.async_get_chat_session(
                     self.hass, user_input.conversation_id
@@ -1276,6 +1230,53 @@ class PipelineRun:
                     chat_log_delta_listener=chat_log_delta_listener,
                 ) as chat_log,
             ):
+                agent_id = self.intent_agent.id
+                processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
+                all_targets_in_satellite_area = False
+                intent_response: intent.IntentResponse | None = None
+                if not processed_locally and not self._intent_agent_only:
+                    # Sentence triggers override conversation agent
+                    if (
+                        trigger_response_text
+                        := await conversation.async_handle_sentence_triggers(
+                            self.hass, user_input, chat_log
+                        )
+                    ) is not None:
+                        # Sentence trigger matched
+                        agent_id = "sentence_trigger"
+                        processed_locally = True
+                        intent_response = intent.IntentResponse(
+                            self.pipeline.conversation_language
+                        )
+                        intent_response.async_set_speech(trigger_response_text)
+
+                intent_filter: Callable[[RecognizeResult], bool] | None = None
+                # If the LLM has API access, we filter out some sentences that are
+                # interfering with LLM operation.
+                if (
+                    intent_agent_state := self.hass.states.get(self.intent_agent.id)
+                ) and intent_agent_state.attributes.get(
+                    ATTR_SUPPORTED_FEATURES, 0
+                ) & conversation.ConversationEntityFeature.CONTROL:
+                    intent_filter = _async_local_fallback_intent_filter
+
+                # Try local intents
+                if (
+                    intent_response is None
+                    and self.pipeline.prefer_local_intents
+                    and (
+                        intent_response := await conversation.async_handle_intents(
+                            self.hass,
+                            user_input,
+                            chat_log,
+                            intent_filter=intent_filter,
+                        )
+                    )
+                ):
+                    # Local intent matched
+                    agent_id = conversation.HOME_ASSISTANT_AGENT
+                    processed_locally = True
+
                 # It was already handled, create response and add to chat history
                 if intent_response is not None:
                     speech: str = intent_response.speech.get("plain", {}).get(

@@ -113,7 +113,6 @@
     "triggers": {
       "idle": {
         "description": "Triggers when an Assist satellite becomes idle.",
-        "description_configured": "[%key:component::assist_satellite::triggers::idle::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
@@ -124,7 +123,6 @@
       },
      "listening": {
         "description": "Triggers when an Assist satellite starts listening.",
-        "description_configured": "[%key:component::assist_satellite::triggers::listening::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
@@ -135,7 +133,6 @@
       },
       "processing": {
         "description": "Triggers when an Assist satellite is processing.",
-        "description_configured": "[%key:component::assist_satellite::triggers::processing::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
@@ -146,7 +143,6 @@
       },
       "responding": {
         "description": "Triggers when an Assist satellite is responding.",
-        "description_configured": "[%key:component::assist_satellite::triggers::responding::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",

@@ -12,8 +12,9 @@ from typing import Any, Protocol, cast
 from propcache.api import cached_property
 import voluptuous as vol

-from homeassistant.components import websocket_api
+from homeassistant.components import labs, websocket_api
 from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
+from homeassistant.components.labs import async_listen as async_labs_listen
 from homeassistant.const import (
     ATTR_ENTITY_ID,
     ATTR_MODE,
@@ -114,6 +115,51 @@ ATTR_SOURCE = "source"
 ATTR_VARIABLES = "variables"
 SERVICE_TRIGGER = "trigger"

+NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG = "new_triggers_conditions"
+
+_EXPERIMENTAL_CONDITION_PLATFORMS = {
+    "light",
+}
+
+_EXPERIMENTAL_TRIGGER_PLATFORMS = {
+    "alarm_control_panel",
+    "assist_satellite",
+    "climate",
+    "cover",
+    "fan",
+    "lawn_mower",
+    "light",
+    "media_player",
+    "text",
+    "vacuum",
+}
+
+
+@callback
+def is_disabled_experimental_condition(hass: HomeAssistant, platform: str) -> bool:
+    """Check if the platform is a disabled experimental condition platform."""
+    return (
+        platform in _EXPERIMENTAL_CONDITION_PLATFORMS
+        and not labs.async_is_preview_feature_enabled(
+            hass,
+            DOMAIN,
+            NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
+        )
+    )
+
+
+@callback
+def is_disabled_experimental_trigger(hass: HomeAssistant, platform: str) -> bool:
+    """Check if the platform is a disabled experimental trigger platform."""
+    return (
+        platform in _EXPERIMENTAL_TRIGGER_PLATFORMS
+        and not labs.async_is_preview_feature_enabled(
+            hass,
+            DOMAIN,
+            NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
+        )
+    )
+
+
 class IfAction(Protocol):
     """Define the format of if_action."""
@@ -317,6 +363,20 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
         schema=vol.Schema({vol.Optional(CONF_ID): str}),
     )

+    @callback
+    def new_triggers_conditions_listener() -> None:
+        """Handle new_triggers_conditions flag change."""
+        hass.async_create_task(
+            reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
+        )
+
+    async_labs_listen(
+        hass,
+        DOMAIN,
+        NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
+        new_triggers_conditions_listener,
+    )
+
     websocket_api.async_register_command(hass, websocket_config)

     return True

@@ -17,8 +17,12 @@ from homeassistant.components.media_player import (
 class BangOlufsenSource:
     """Class used for associating device source ids with friendly names. May not include all sources."""

+    DEEZER: Final[Source] = Source(name="Deezer", id="deezer")
     LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn")
+    NET_RADIO: Final[Source] = Source(name="B&O Radio", id="netRadio")
     SPDIF: Final[Source] = Source(name="Optical", id="spdif")
+    TIDAL: Final[Source] = Source(name="Tidal", id="tidal")
     UNKNOWN: Final[Source] = Source(name="Unknown Source", id="unknown")
     URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer")

@@ -78,6 +82,16 @@ class BangOlufsenModel(StrEnum):
     BEOREMOTE_ONE = "Beoremote One"


+class BangOlufsenAttribute(StrEnum):
+    """Enum for extra_state_attribute keys."""
+
+    BEOLINK = "beolink"
+    BEOLINK_PEERS = "peers"
+    BEOLINK_SELF = "self"
+    BEOLINK_LEADER = "leader"
+    BEOLINK_LISTENERS = "listeners"
+
+
 # Physical "buttons" on devices
 class BangOlufsenButtons(StrEnum):
     """Enum for device buttons."""

@@ -82,6 +82,7 @@ from .const import (
     FALLBACK_SOURCES,
     MANUFACTURER,
     VALID_MEDIA_TYPES,
+    BangOlufsenAttribute,
     BangOlufsenMediaType,
     BangOlufsenSource,
     WebsocketNotification,
@@ -224,7 +225,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
         # Beolink compatible sources
         self._beolink_sources: dict[str, bool] = {}
         self._remote_leader: BeolinkLeader | None = None
-        # Extra state attributes for showing Beolink: peer(s), listener(s), leader and self
+        # Extra state attributes:
+        # Beolink: peer(s), listener(s), leader and self
         self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {}

     async def async_added_to_hass(self) -> None:
@@ -436,7 +438,10 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
         await self._async_update_beolink()

     async def _async_update_beolink(self) -> None:
-        """Update the current Beolink leader, listeners, peers and self."""
+        """Update the current Beolink leader, listeners, peers and self.
+
+        Updates Home Assistant state.
+        """

         self._beolink_attributes = {}

@@ -445,18 +450,24 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):

         # Add Beolink self
         self._beolink_attributes = {
-            "beolink": {"self": {self.device_entry.name: self._beolink_jid}}
+            BangOlufsenAttribute.BEOLINK: {
+                BangOlufsenAttribute.BEOLINK_SELF: {
+                    self.device_entry.name: self._beolink_jid
+                }
+            }
         }

         # Add Beolink peers
         peers = await self._client.get_beolink_peers()

         if len(peers) > 0:
-            self._beolink_attributes["beolink"]["peers"] = {}
+            self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
+                BangOlufsenAttribute.BEOLINK_PEERS
+            ] = {}
             for peer in peers:
-                self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = (
-                    peer.jid
-                )
+                self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
+                    BangOlufsenAttribute.BEOLINK_PEERS
+                ][peer.friendly_name] = peer.jid

         # Add Beolink listeners / leader
         self._remote_leader = self._playback_metadata.remote_leader
@@ -477,7 +488,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
             # Add self
             group_members.append(self.entity_id)

-            self._beolink_attributes["beolink"]["leader"] = {
+            self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
+                BangOlufsenAttribute.BEOLINK_LEADER
+            ] = {
                 self._remote_leader.friendly_name: self._remote_leader.jid,
             }

@@ -514,9 +527,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
                         beolink_listener.jid
                     )
                     break
-            self._beolink_attributes["beolink"]["listeners"] = (
-                beolink_listeners_attribute
-            )
+            self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
+                BangOlufsenAttribute.BEOLINK_LISTENERS
+            ] = beolink_listeners_attribute

         self._attr_group_members = group_members

@@ -615,11 +628,18 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
         return None

     @property
-    def media_content_type(self) -> str:
+    def media_content_type(self) -> MediaType | str | None:
         """Return the current media type."""
-        # Hard to determine content type
-        if self._source_change.id == BangOlufsenSource.URI_STREAMER.id:
-            return MediaType.URL
+        content_type = {
+            BangOlufsenSource.URI_STREAMER.id: MediaType.URL,
+            BangOlufsenSource.DEEZER.id: BangOlufsenMediaType.DEEZER,
+            BangOlufsenSource.TIDAL.id: BangOlufsenMediaType.TIDAL,
+            BangOlufsenSource.NET_RADIO.id: BangOlufsenMediaType.RADIO,
+        }
+        # Hard to determine content type.
+        if self._source_change.id in content_type:
+            return content_type[self._source_change.id]

         return MediaType.MUSIC

     @property
@@ -632,6 +652,11 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
         """Return the current playback progress."""
         return self._playback_progress.progress

+    @property
+    def media_content_id(self) -> str | None:
+        """Return internal ID of Deezer, Tidal and radio stations."""
+        return self._playback_metadata.source_internal_id
+
     @property
     def media_image_url(self) -> str | None:
         """Return URL of the currently playing music."""
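
The media_content_type rewrite above replaces a per-source if-chain with a lookup table. The same idea, reduced to a standalone sketch with plain strings instead of the real enums (the dictionary values here are illustrative only):

```python
# Sketch of the lookup-table idea from media_content_type above, as a free
# function with plain strings standing in for MediaType/BangOlufsenMediaType.
CONTENT_TYPE_BY_SOURCE = {
    "uriStreamer": "url",
    "deezer": "deezer",
    "tidal": "tidal",
    "netRadio": "radio",
}


def media_content_type(source_id: str) -> str:
    # dict.get() collapses the per-source branches and the fallback into one lookup.
    return CONTENT_TYPE_BY_SOURCE.get(source_id, "music")


assert media_content_type("deezer") == "deezer"
assert media_content_type("lineIn") == "music"  # unknown sources fall back to music
```

The real property keeps an explicit membership check and returns the enum members shown in the diff; the sketch only illustrates the table-plus-default shape of the refactor.
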

@@ -68,9 +68,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: BoschAlarmConfigEntry) -
         config_entry_id=entry.entry_id,
         connections={(CONNECTION_NETWORK_MAC, mac)} if mac else set(),
         identifiers={(DOMAIN, entry.unique_id or entry.entry_id)},
-        name=f"Bosch {panel.model}",
+        name=f"Bosch {panel.model.name}",
         manufacturer="Bosch Security Systems",
-        model=panel.model,
+        model=panel.model.name,
         sw_version=panel.firmware_version,
     )
     await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

@@ -83,7 +83,7 @@ async def try_connect(
     finally:
         await panel.disconnect()

-    return (panel.model, panel.serial_number)
+    return (panel.model.name, panel.serial_number)


 class BoschAlarmConfigFlow(ConfigFlow, domain=DOMAIN):

@@ -20,7 +20,8 @@ async def async_get_config_entry_diagnostics(
     return {
         "entry_data": async_redact_data(entry.data, TO_REDACT),
         "data": {
-            "model": entry.runtime_data.model,
+            "model": entry.runtime_data.model.name,
+            "family": entry.runtime_data.model.family.name,
             "serial_number": entry.runtime_data.serial_number,
             "protocol_version": entry.runtime_data.protocol_version,
             "firmware_version": entry.runtime_data.firmware_version,

@@ -26,7 +26,7 @@ class BoschAlarmEntity(Entity):
         self._attr_should_poll = False
         self._attr_device_info = DeviceInfo(
             identifiers={(DOMAIN, unique_id)},
-            name=f"Bosch {panel.model}",
+            name=f"Bosch {panel.model.name}",
             manufacturer="Bosch Security Systems",
         )

@@ -12,5 +12,5 @@
   "integration_type": "device",
   "iot_class": "local_push",
   "quality_scale": "platinum",
-  "requirements": ["bosch-alarm-mode2==0.4.6"]
+  "requirements": ["bosch-alarm-mode2==0.4.10"]
 }
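
The `.name` accessors and the new `family` diagnostics field suggest that `panel.model` is no longer a plain string in the bumped bosch-alarm-mode2 release but a structured object; that is an inference from this diff, not something stated here. Under that assumption, the reason for the change can be shown with a tiny self-contained sketch (the class and model names below are invented for illustration):

```python
# Hedged illustration: if panel.model is an object rather than a str,
# interpolating it directly renders its repr instead of a clean model name.
from dataclasses import dataclass


@dataclass
class PanelFamily:
    name: str


@dataclass
class PanelModel:
    name: str
    family: PanelFamily


model = PanelModel(name="B5512", family=PanelFamily(name="B Series"))

print(f"Bosch {model}")       # Bosch PanelModel(name='B5512', family=PanelFamily(name='B Series'))
print(f"Bosch {model.name}")  # Bosch B5512
```
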

@@ -98,6 +98,12 @@
     }
   },
   "triggers": {
+    "started_cooling": {
+      "trigger": "mdi:snowflake"
+    },
+    "started_drying": {
+      "trigger": "mdi:water-percent"
+    },
     "started_heating": {
       "trigger": "mdi:fire"
     },

@@ -298,9 +298,28 @@
     },
     "title": "Climate",
     "triggers": {
+      "started_cooling": {
+        "description": "Triggers when a climate started cooling.",
+        "fields": {
+          "behavior": {
+            "description": "[%key:component::climate::common::trigger_behavior_description%]",
+            "name": "[%key:component::climate::common::trigger_behavior_name%]"
+          }
+        },
+        "name": "When a climate started cooling"
+      },
+      "started_drying": {
+        "description": "Triggers when a climate started drying.",
+        "fields": {
+          "behavior": {
+            "description": "[%key:component::climate::common::trigger_behavior_description%]",
+            "name": "[%key:component::climate::common::trigger_behavior_name%]"
+          }
+        },
+        "name": "When a climate started drying"
+      },
       "started_heating": {
         "description": "Triggers when a climate starts to heat.",
-        "description_configured": "[%key:component::climate::triggers::started_heating::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::climate::common::trigger_behavior_description%]",
@@ -311,7 +330,6 @@
       },
       "turned_off": {
         "description": "Triggers when a climate is turned off.",
-        "description_configured": "[%key:component::climate::triggers::turned_off::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::climate::common::trigger_behavior_description%]",
@@ -322,7 +340,6 @@
       },
       "turned_on": {
         "description": "Triggers when a climate is turned on.",
-        "description_configured": "[%key:component::climate::triggers::turned_on::description%]",
         "fields": {
           "behavior": {
             "description": "[%key:component::climate::common::trigger_behavior_description%]",

@@ -11,6 +11,12 @@ from homeassistant.helpers.trigger import (
 from .const import ATTR_HVAC_ACTION, DOMAIN, HVACAction, HVACMode

 TRIGGERS: dict[str, type[Trigger]] = {
+    "started_cooling": make_entity_state_attribute_trigger(
+        DOMAIN, ATTR_HVAC_ACTION, HVACAction.COOLING
+    ),
+    "started_drying": make_entity_state_attribute_trigger(
+        DOMAIN, ATTR_HVAC_ACTION, HVACAction.DRYING
+    ),
     "turned_off": make_entity_state_trigger(DOMAIN, HVACMode.OFF),
     "turned_on": make_conditional_entity_state_trigger(
         DOMAIN,

@@ -14,6 +14,8 @@
     - last
     - any

+started_cooling: *trigger_common
+started_drying: *trigger_common
 started_heating: *trigger_common
 turned_off: *trigger_common
 turned_on: *trigger_common
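
As a rough mental model for the factory call `make_entity_state_attribute_trigger(DOMAIN, ATTR_HVAC_ACTION, HVACAction.COOLING)` added above: the resulting trigger fires when the watched state attribute transitions to the target value. The sketch below is a conceptual simplification, not the helper's actual implementation:

```python
# Conceptual sketch of an attribute-transition check; not Home Assistant code.
def should_fire(old_attrs: dict, new_attrs: dict, attribute: str, target: str) -> bool:
    """Fire only when the attribute changes *to* the target value."""
    return old_attrs.get(attribute) != target and new_attrs.get(attribute) == target


assert should_fire(
    {"hvac_action": "idle"}, {"hvac_action": "cooling"}, "hvac_action", "cooling"
)
assert not should_fire(
    {"hvac_action": "cooling"}, {"hvac_action": "cooling"}, "hvac_action", "cooling"
)
```
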

@@ -6,6 +6,7 @@ import io
 from json import JSONDecodeError
 import logging

+from hass_nabucasa import NabuCasaBaseError
 from hass_nabucasa.llm import (
     LLMAuthenticationError,
     LLMError,
@@ -93,10 +94,11 @@ async def async_setup_entry(
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up Home Assistant Cloud AI Task entity."""
-    cloud = hass.data[DATA_CLOUD]
+    if not (cloud := hass.data[DATA_CLOUD]).is_logged_in:
+        return
     try:
         await cloud.llm.async_ensure_token()
-    except LLMError:
+    except (LLMError, NabuCasaBaseError):
         return

     async_add_entities([CloudLLMTaskEntity(cloud, config_entry)])

@@ -4,6 +4,7 @@ from __future__ import annotations

 from typing import Literal

+from hass_nabucasa import NabuCasaBaseError
 from hass_nabucasa.llm import LLMError

 from homeassistant.components import conversation
@@ -23,10 +24,11 @@ async def async_setup_entry(
     async_add_entities: AddConfigEntryEntitiesCallback,
 ) -> None:
     """Set up the Home Assistant Cloud conversation entity."""
-    cloud = hass.data[DATA_CLOUD]
+    if not (cloud := hass.data[DATA_CLOUD]).is_logged_in:
+        return
     try:
         await cloud.llm.async_ensure_token()
-    except LLMError:
+    except (LLMError, NabuCasaBaseError):
         return

     async_add_entities([CloudConversationEntity(cloud, config_entry)])

@@ -13,6 +13,6 @@
   "integration_type": "system",
   "iot_class": "cloud_push",
   "loggers": ["acme", "hass_nabucasa", "snitun"],
-  "requirements": ["hass-nabucasa==1.6.1"],
+  "requirements": ["hass-nabucasa==1.6.2"],
   "single_config_entry": true
 }
@@ -236,7 +236,9 @@ async def async_prepare_agent(


async def async_handle_sentence_triggers(
    hass: HomeAssistant, user_input: ConversationInput
    hass: HomeAssistant,
    user_input: ConversationInput,
    chat_log: ChatLog,
) -> str | None:
    """Try to match input against sentence triggers and return response text.

@@ -245,12 +247,13 @@ async def async_handle_sentence_triggers(
    agent = get_agent_manager(hass).default_agent
    assert agent is not None

    return await agent.async_handle_sentence_triggers(user_input)
    return await agent.async_handle_sentence_triggers(user_input, chat_log)


async def async_handle_intents(
    hass: HomeAssistant,
    user_input: ConversationInput,
    chat_log: ChatLog,
    *,
    intent_filter: Callable[[RecognizeResult], bool] | None = None,
) -> intent.IntentResponse | None:
@@ -261,7 +264,9 @@ async def async_handle_intents(
    agent = get_agent_manager(hass).default_agent
    assert agent is not None

    return await agent.async_handle_intents(user_input, intent_filter=intent_filter)
    return await agent.async_handle_intents(
        user_input, chat_log, intent_filter=intent_filter
    )


async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
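The conversation changes thread a ChatLog through the sentence-trigger and intent paths so that tool calls and their results are recorded alongside the spoken response. Below is a rough sketch of that bookkeeping pattern using plain dataclasses; ToolCall, ToolResult and ChatLogSketch are illustrative names, not Home Assistant's actual ChatLog/ToolInput/ToolResultContent types.

from dataclasses import dataclass, field
from uuid import uuid4


@dataclass
class ToolCall:
    tool_call_id: str
    tool_name: str
    tool_args: dict


@dataclass
class ToolResult:
    tool_call_id: str
    tool_name: str
    tool_result: dict


@dataclass
class ChatLogSketch:
    """Collects what happened while handling one utterance."""

    content: list = field(default_factory=list)

    def add(self, item) -> None:
        self.content.append(item)


def handle_intent(chat_log: ChatLogSketch, intent_name: str, slots: dict) -> dict:
    # Record the call before executing and the result afterwards, mirroring how
    # the default agent now wraps intent handling and sentence triggers.
    call = ToolCall(tool_call_id=str(uuid4()), tool_name=intent_name, tool_args=slots)
    chat_log.add(call)
    result = {"speech": f"Handled {intent_name}"}  # placeholder for intent execution
    chat_log.add(ToolResult(call.tool_call_id, call.tool_name, result))
    return result


log = ChatLogSketch()
handle_intent(log, "HassTurnOn", {"name": "kitchen light"})
assert [type(item).__name__ for item in log.content] == ["ToolCall", "ToolResult"]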
@@ -66,6 +66,7 @@ from homeassistant.helpers import (
|
||||
entity_registry as er,
|
||||
floor_registry as fr,
|
||||
intent,
|
||||
llm,
|
||||
start as ha_start,
|
||||
template,
|
||||
translation,
|
||||
@@ -76,7 +77,7 @@ from homeassistant.util import language as language_util
|
||||
from homeassistant.util.json import JsonObjectType, json_loads_object
|
||||
|
||||
from .agent_manager import get_agent_manager
|
||||
from .chat_log import AssistantContent, ChatLog
|
||||
from .chat_log import AssistantContent, ChatLog, ToolResultContent
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
METADATA_CUSTOM_FILE,
|
||||
@@ -435,7 +436,7 @@ class DefaultAgent(ConversationEntity):
|
||||
if trigger_result := await self.async_recognize_sentence_trigger(user_input):
|
||||
# Process callbacks and get response
|
||||
response_text = await self._handle_trigger_result(
|
||||
trigger_result, user_input
|
||||
trigger_result, user_input, chat_log
|
||||
)
|
||||
|
||||
# Convert to conversation result
|
||||
@@ -447,8 +448,9 @@ class DefaultAgent(ConversationEntity):
|
||||
if response is None:
|
||||
# Match intents
|
||||
intent_result = await self.async_recognize_intent(user_input)
|
||||
|
||||
response = await self._async_process_intent_result(
|
||||
intent_result, user_input
|
||||
intent_result, user_input, chat_log
|
||||
)
|
||||
|
||||
speech: str = response.speech.get("plain", {}).get("speech", "")
|
||||
@@ -467,6 +469,7 @@ class DefaultAgent(ConversationEntity):
|
||||
self,
|
||||
result: RecognizeResult | None,
|
||||
user_input: ConversationInput,
|
||||
chat_log: ChatLog,
|
||||
) -> intent.IntentResponse:
|
||||
"""Process user input with intents."""
|
||||
language = user_input.language or self.hass.config.language
|
||||
@@ -529,12 +532,21 @@ class DefaultAgent(ConversationEntity):
|
||||
ConversationTraceEventType.TOOL_CALL,
|
||||
{
|
||||
"intent_name": result.intent.name,
|
||||
"slots": {
|
||||
entity.name: entity.value or entity.text
|
||||
for entity in result.entities_list
|
||||
},
|
||||
"slots": {entity.name: entity.value for entity in result.entities_list},
|
||||
},
|
||||
)
|
||||
tool_input = llm.ToolInput(
|
||||
tool_name=result.intent.name,
|
||||
tool_args={entity.name: entity.value for entity in result.entities_list},
|
||||
external=True,
|
||||
)
|
||||
chat_log.async_add_assistant_content_without_tools(
|
||||
AssistantContent(
|
||||
agent_id=user_input.agent_id,
|
||||
content=None,
|
||||
tool_calls=[tool_input],
|
||||
)
|
||||
)
|
||||
|
||||
try:
|
||||
intent_response = await intent.async_handle(
|
||||
@@ -597,6 +609,16 @@ class DefaultAgent(ConversationEntity):
|
||||
)
|
||||
intent_response.async_set_speech(speech)
|
||||
|
||||
tool_result = llm.IntentResponseDict(intent_response)
|
||||
chat_log.async_add_assistant_content_without_tools(
|
||||
ToolResultContent(
|
||||
agent_id=user_input.agent_id,
|
||||
tool_call_id=tool_input.id,
|
||||
tool_name=tool_input.tool_name,
|
||||
tool_result=tool_result,
|
||||
)
|
||||
)
|
||||
|
||||
return intent_response
|
||||
|
||||
def _recognize(
|
||||
@@ -1523,16 +1545,31 @@ class DefaultAgent(ConversationEntity):
|
||||
)
|
||||
|
||||
async def _handle_trigger_result(
|
||||
self, result: SentenceTriggerResult, user_input: ConversationInput
|
||||
self,
|
||||
result: SentenceTriggerResult,
|
||||
user_input: ConversationInput,
|
||||
chat_log: ChatLog,
|
||||
) -> str:
|
||||
"""Run sentence trigger callbacks and return response text."""
|
||||
|
||||
# Gather callback responses in parallel
|
||||
trigger_callbacks = [
|
||||
self._triggers_details[trigger_id].callback(user_input, trigger_result)
|
||||
for trigger_id, trigger_result in result.matched_triggers.items()
|
||||
]
|
||||
|
||||
tool_input = llm.ToolInput(
|
||||
tool_name="trigger_sentence",
|
||||
tool_args={},
|
||||
external=True,
|
||||
)
|
||||
chat_log.async_add_assistant_content_without_tools(
|
||||
AssistantContent(
|
||||
agent_id=user_input.agent_id,
|
||||
content=None,
|
||||
tool_calls=[tool_input],
|
||||
)
|
||||
)
|
||||
|
||||
# Use first non-empty result as response.
|
||||
#
|
||||
# There may be multiple copies of a trigger running when editing in
|
||||
@@ -1561,23 +1598,38 @@ class DefaultAgent(ConversationEntity):
|
||||
f"component.{DOMAIN}.conversation.agent.done", "Done"
|
||||
)
|
||||
|
||||
tool_result: dict[str, Any] = {"response": response_text}
|
||||
chat_log.async_add_assistant_content_without_tools(
|
||||
ToolResultContent(
|
||||
agent_id=user_input.agent_id,
|
||||
tool_call_id=tool_input.id,
|
||||
tool_name=tool_input.tool_name,
|
||||
tool_result=tool_result,
|
||||
)
|
||||
)
|
||||
|
||||
return response_text
|
||||
|
||||
async def async_handle_sentence_triggers(
|
||||
self, user_input: ConversationInput
|
||||
self,
|
||||
user_input: ConversationInput,
|
||||
chat_log: ChatLog,
|
||||
) -> str | None:
|
||||
"""Try to input sentence against sentence triggers and return response text.
|
||||
|
||||
Returns None if no match occurred.
|
||||
"""
|
||||
if trigger_result := await self.async_recognize_sentence_trigger(user_input):
|
||||
return await self._handle_trigger_result(trigger_result, user_input)
|
||||
return await self._handle_trigger_result(
|
||||
trigger_result, user_input, chat_log
|
||||
)
|
||||
|
||||
return None
|
||||
|
||||
async def async_handle_intents(
|
||||
self,
|
||||
user_input: ConversationInput,
|
||||
chat_log: ChatLog,
|
||||
*,
|
||||
intent_filter: Callable[[RecognizeResult], bool] | None = None,
|
||||
) -> intent.IntentResponse | None:
|
||||
@@ -1593,7 +1645,7 @@ class DefaultAgent(ConversationEntity):
|
||||
# No error message on failed match
|
||||
return None
|
||||
|
||||
response = await self._async_process_intent_result(result, user_input)
|
||||
response = await self._async_process_intent_result(result, user_input, chat_log)
|
||||
if (
|
||||
response.response_type == intent.IntentResponseType.ERROR
|
||||
and response.error_code
|
||||
|
||||
@@ -8,6 +8,10 @@ from typing import Any
from pycoolmasternet_async import SWING_MODES

from homeassistant.components.climate import (
    FAN_AUTO,
    FAN_HIGH,
    FAN_LOW,
    FAN_MEDIUM,
    ClimateEntity,
    ClimateEntityFeature,
    HVACMode,
@@ -31,7 +35,16 @@ CM_TO_HA_STATE = {

HA_STATE_TO_CM = {value: key for key, value in CM_TO_HA_STATE.items()}

FAN_MODES = ["low", "med", "high", "auto"]
CM_TO_HA_FAN = {
    "low": FAN_LOW,
    "med": FAN_MEDIUM,
    "high": FAN_HIGH,
    "auto": FAN_AUTO,
}

HA_FAN_TO_CM = {value: key for key, value in CM_TO_HA_FAN.items()}

FAN_MODES = list(CM_TO_HA_FAN.values())

_LOGGER = logging.getLogger(__name__)

@@ -111,7 +124,7 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
    @property
    def fan_mode(self):
        """Return the fan setting."""
        return self._unit.fan_speed
        return CM_TO_HA_FAN[self._unit.fan_speed]

    @property
    def fan_modes(self):
@@ -138,7 +151,7 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
    async def async_set_fan_mode(self, fan_mode: str) -> None:
        """Set new fan mode."""
        _LOGGER.debug("Setting fan mode of %s to %s", self.unique_id, fan_mode)
        self._unit = await self._unit.set_fan_speed(fan_mode)
        self._unit = await self._unit.set_fan_speed(HA_FAN_TO_CM[fan_mode])
        self.async_write_ha_state()

    async def async_set_swing_mode(self, swing_mode: str) -> None:
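The CoolMasterNet change above replaces raw device fan strings with explicit two-way lookup tables. A small sketch of the same pattern, with the Home Assistant fan-mode constants simplified to plain strings for illustration:

# Device-side values on the left, Home Assistant-facing values on the right.
CM_TO_HA_FAN = {
    "low": "low",
    "med": "medium",
    "high": "high",
    "auto": "auto",
}
HA_FAN_TO_CM = {value: key for key, value in CM_TO_HA_FAN.items()}
FAN_MODES = list(CM_TO_HA_FAN.values())


def fan_mode_from_device(raw: str) -> str:
    """Translate what the device reports into the value shown in the UI."""
    return CM_TO_HA_FAN[raw]


def fan_mode_to_device(ha_mode: str) -> str:
    """Translate a UI selection back into the device's vocabulary."""
    return HA_FAN_TO_CM[ha_mode]


assert fan_mode_from_device("med") == "medium"
assert fan_mode_to_device("medium") == "med"
assert all(fan_mode_to_device(fan_mode_from_device(raw)) == raw for raw in CM_TO_HA_FAN)

Deriving the reverse table and the mode list from one source dictionary keeps the two directions from drifting apart.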
@@ -108,34 +108,5 @@
|
||||
"toggle_cover_tilt": {
|
||||
"service": "mdi:arrow-top-right-bottom-left"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"awning_opened": {
|
||||
"trigger": "mdi:awning-outline"
|
||||
},
|
||||
"blind_opened": {
|
||||
"trigger": "mdi:blinds-horizontal"
|
||||
},
|
||||
"curtain_opened": {
|
||||
"trigger": "mdi:curtains"
|
||||
},
|
||||
"door_opened": {
|
||||
"trigger": "mdi:door-open"
|
||||
},
|
||||
"garage_opened": {
|
||||
"trigger": "mdi:garage-open"
|
||||
},
|
||||
"gate_opened": {
|
||||
"trigger": "mdi:gate-open"
|
||||
},
|
||||
"shade_opened": {
|
||||
"trigger": "mdi:roller-shade"
|
||||
},
|
||||
"shutter_opened": {
|
||||
"trigger": "mdi:window-shutter-open"
|
||||
},
|
||||
"window_opened": {
|
||||
"trigger": "mdi:window-open"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,16 +1,4 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description_awning": "The behavior of the targeted awnings to trigger on.",
|
||||
"trigger_behavior_description_blind": "The behavior of the targeted blinds to trigger on.",
|
||||
"trigger_behavior_description_curtain": "The behavior of the targeted curtains to trigger on.",
|
||||
"trigger_behavior_description_door": "The behavior of the targeted doors to trigger on.",
|
||||
"trigger_behavior_description_garage": "The behavior of the targeted garage doors to trigger on.",
|
||||
"trigger_behavior_description_gate": "The behavior of the targeted gates to trigger on.",
|
||||
"trigger_behavior_description_shade": "The behavior of the targeted shades to trigger on.",
|
||||
"trigger_behavior_description_shutter": "The behavior of the targeted shutters to trigger on.",
|
||||
"trigger_behavior_description_window": "The behavior of the targeted windows to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"close": "Close {entity_name}",
|
||||
@@ -94,15 +82,6 @@
|
||||
"name": "Window"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"close_cover": {
|
||||
"description": "Closes a cover.",
|
||||
@@ -157,142 +136,5 @@
|
||||
"name": "Toggle tilt"
|
||||
}
|
||||
},
|
||||
"title": "Cover",
|
||||
"triggers": {
|
||||
"awning_opened": {
|
||||
"description": "Triggers when an awning opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::awning_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_awning%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the awnings to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When an awning opens"
|
||||
},
|
||||
"blind_opened": {
|
||||
"description": "Triggers when a blind opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::blind_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_blind%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the blinds to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a blind opens"
|
||||
},
|
||||
"curtain_opened": {
|
||||
"description": "Triggers when a curtain opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::curtain_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_curtain%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the curtains to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a curtain opens"
|
||||
},
|
||||
"door_opened": {
|
||||
"description": "Triggers when a door opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::door_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_door%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the doors to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a door opens"
|
||||
},
|
||||
"garage_opened": {
|
||||
"description": "Triggers when a garage door opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::garage_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_garage%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the garage doors to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a garage door opens"
|
||||
},
|
||||
"gate_opened": {
|
||||
"description": "Triggers when a gate opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::gate_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_gate%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the gates to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a gate opens"
|
||||
},
|
||||
"shade_opened": {
|
||||
"description": "Triggers when a shade opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::shade_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_shade%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the shades to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a shade opens"
|
||||
},
|
||||
"shutter_opened": {
|
||||
"description": "Triggers when a shutter opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::shutter_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_shutter%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the shutters to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a shutter opens"
|
||||
},
|
||||
"window_opened": {
|
||||
"description": "Triggers when a window opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::window_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_window%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the windows to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a window opens"
|
||||
}
|
||||
}
|
||||
"title": "Cover"
|
||||
}
|
||||
|
||||
@@ -1,116 +0,0 @@
|
||||
"""Provides triggers for covers."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_OPTIONS
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity import get_device_class
|
||||
from homeassistant.helpers.trigger import (
|
||||
ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST,
|
||||
EntityTriggerBase,
|
||||
Trigger,
|
||||
TriggerConfig,
|
||||
)
|
||||
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
|
||||
|
||||
from . import ATTR_CURRENT_POSITION, CoverDeviceClass, CoverState
|
||||
from .const import DOMAIN
|
||||
|
||||
ATTR_FULLY_OPENED: Final = "fully_opened"
|
||||
|
||||
COVER_OPENED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend(
|
||||
{
|
||||
vol.Required(CONF_OPTIONS): {
|
||||
vol.Required(ATTR_FULLY_OPENED, default=False): bool,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def get_device_class_or_undefined(
|
||||
hass: HomeAssistant, entity_id: str
|
||||
) -> str | None | UndefinedType:
|
||||
"""Get the device class of an entity or UNDEFINED if not found."""
|
||||
try:
|
||||
return get_device_class(hass, entity_id)
|
||||
except HomeAssistantError:
|
||||
return UNDEFINED
|
||||
|
||||
|
||||
class CoverOpenedClosedTrigger(EntityTriggerBase):
|
||||
"""Class for cover opened and closed triggers."""
|
||||
|
||||
_attribute: str = ATTR_CURRENT_POSITION
|
||||
_attribute_value: int | None = None
|
||||
_device_class: CoverDeviceClass | None
|
||||
_domain: str = DOMAIN
|
||||
_to_states: set[str]
|
||||
|
||||
def is_to_state(self, state: State) -> bool:
|
||||
"""Check if the state matches the target state."""
|
||||
if state.state not in self._to_states:
|
||||
return False
|
||||
if (
|
||||
self._attribute_value is not None
|
||||
and (value := state.attributes.get(self._attribute)) is not None
|
||||
and value != self._attribute_value
|
||||
):
|
||||
return False
|
||||
return True
|
||||
|
||||
def entity_filter(self, entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
entities = super().entity_filter(entities)
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if get_device_class_or_undefined(self._hass, entity_id)
|
||||
== self._device_class
|
||||
}
|
||||
|
||||
|
||||
class CoverOpenedTrigger(CoverOpenedClosedTrigger):
|
||||
"""Class for cover opened triggers."""
|
||||
|
||||
_schema = COVER_OPENED_TRIGGER_SCHEMA
|
||||
_to_states = {CoverState.OPEN, CoverState.OPENING}
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize the state trigger."""
|
||||
super().__init__(hass, config)
|
||||
if self._options.get(ATTR_FULLY_OPENED):
|
||||
self._attribute_value = 100
|
||||
|
||||
|
||||
def make_cover_opened_trigger(
|
||||
device_class: CoverDeviceClass | None,
|
||||
) -> type[CoverOpenedTrigger]:
|
||||
"""Create an entity state attribute trigger class."""
|
||||
|
||||
class CustomTrigger(CoverOpenedTrigger):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_device_class = device_class
|
||||
|
||||
return CustomTrigger
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"awning_opened": make_cover_opened_trigger(CoverDeviceClass.AWNING),
|
||||
"blind_opened": make_cover_opened_trigger(CoverDeviceClass.BLIND),
|
||||
"curtain_opened": make_cover_opened_trigger(CoverDeviceClass.CURTAIN),
|
||||
"door_opened": make_cover_opened_trigger(CoverDeviceClass.DOOR),
|
||||
"garage_opened": make_cover_opened_trigger(CoverDeviceClass.GARAGE),
|
||||
"gate_opened": make_cover_opened_trigger(CoverDeviceClass.GATE),
|
||||
"shade_opened": make_cover_opened_trigger(CoverDeviceClass.SHADE),
|
||||
"shutter_opened": make_cover_opened_trigger(CoverDeviceClass.SHUTTER),
|
||||
"window_opened": make_cover_opened_trigger(CoverDeviceClass.WINDOW),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for covers."""
|
||||
return TRIGGERS
|
||||
@@ -1,79 +0,0 @@
|
||||
.trigger_common_fields: &trigger_common_fields
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
fully_opened:
|
||||
required: true
|
||||
default: false
|
||||
selector:
|
||||
boolean:
|
||||
|
||||
awning_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: awning
|
||||
|
||||
blind_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: blind
|
||||
|
||||
curtain_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: curtain
|
||||
|
||||
door_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: door
|
||||
|
||||
garage_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: garage
|
||||
|
||||
gate_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: gate
|
||||
|
||||
shade_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: shade
|
||||
|
||||
shutter_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: shutter
|
||||
|
||||
window_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: window
|
||||
@@ -15,6 +15,11 @@ from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from .const import DOMAIN


def normalize_pairing_code(code: str) -> str:
    """Normalize pairing code by removing spaces and capitalizing."""
    return code.replace(" ", "").upper()


class DropletConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle Droplet config flow."""

@@ -52,14 +57,13 @@ class DropletConfigFlow(ConfigFlow, domain=DOMAIN):
        if user_input is not None:
            # Test if we can connect before returning
            session = async_get_clientsession(self.hass)
            if await self._droplet_discovery.try_connect(
                session, user_input[CONF_CODE]
            ):
            code = normalize_pairing_code(user_input[CONF_CODE])
            if await self._droplet_discovery.try_connect(session, code):
                device_data = {
                    CONF_IP_ADDRESS: self._droplet_discovery.host,
                    CONF_PORT: self._droplet_discovery.port,
                    CONF_DEVICE_ID: device_id,
                    CONF_CODE: user_input[CONF_CODE],
                    CONF_CODE: code,
                }

                return self.async_create_entry(
@@ -90,14 +94,15 @@ class DropletConfigFlow(ConfigFlow, domain=DOMAIN):
                user_input[CONF_IP_ADDRESS], DropletConnection.DEFAULT_PORT, ""
            )
            session = async_get_clientsession(self.hass)
            if await self._droplet_discovery.try_connect(
                session, user_input[CONF_CODE]
            ) and (device_id := await self._droplet_discovery.get_device_id()):
            code = normalize_pairing_code(user_input[CONF_CODE])
            if await self._droplet_discovery.try_connect(session, code) and (
                device_id := await self._droplet_discovery.get_device_id()
            ):
                device_data = {
                    CONF_IP_ADDRESS: self._droplet_discovery.host,
                    CONF_PORT: self._droplet_discovery.port,
                    CONF_DEVICE_ID: device_id,
                    CONF_CODE: user_input[CONF_CODE],
                    CONF_CODE: code,
                }
                await self.async_set_unique_id(device_id, raise_on_progress=False)
                self._abort_if_unique_id_configured(
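The Droplet flow now normalizes the pairing code once and stores the normalized value, so a code typed as "ab12 cd34" matches one stored as "AB12CD34". The helper is small enough to exercise standalone; it is reproduced here outside Home Assistant purely for illustration:

def normalize_pairing_code(code: str) -> str:
    """Normalize pairing code by removing spaces and capitalizing."""
    return code.replace(" ", "").upper()


assert normalize_pairing_code("ab12 cd34") == "AB12CD34"
assert normalize_pairing_code(" AB12CD34 ") == "AB12CD34"
# Normalizing is idempotent, so already-stored codes can be re-normalized safely.
assert normalize_pairing_code(normalize_pairing_code("ab 12")) == "AB12"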
@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_push",
  "loggers": ["sleekxmppfs", "sucks", "deebot_client"],
  "requirements": ["py-sucks==0.9.11", "deebot-client==16.3.0"]
  "requirements": ["py-sucks==0.9.11", "deebot-client==16.4.0"]
}
@@ -285,16 +285,14 @@ async def async_setup_entry(
            name=sensor.name,
        )

        # Hourly rain doesn't reset to fixed hours, it must be measurement state classes
        # Only total rain needs state class for long-term statistics
        if sensor.key in (
            "hrain_piezomm",
            "hrain_piezo",
            "hourlyrainmm",
            "hourlyrainin",
            "totalrainin",
            "totalrainmm",
        ):
            description = dataclasses.replace(
                description,
                state_class=SensorStateClass.MEASUREMENT,
                state_class=SensorStateClass.TOTAL_INCREASING,
            )

        async_add_entities([EcowittSensorEntity(sensor, description)])
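The Ecowitt change swaps the state class on a shared, frozen entity description via dataclasses.replace instead of mutating it. A generic sketch of that copy-with-overrides technique; the Description class here is a simplified stand-in for SensorEntityDescription:

import dataclasses


@dataclasses.dataclass(frozen=True)
class Description:
    key: str
    state_class: str = "measurement"


base = Description(key="totalrainmm")
# Frozen dataclasses cannot be edited in place; replace() returns a tweaked copy.
total = dataclasses.replace(base, state_class="total_increasing")

assert base.state_class == "measurement"        # the shared template is untouched
assert total.state_class == "total_increasing"  # only this sensor's copy changes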
homeassistant/components/energyid/__init__.py (new file, 401 lines)
@@ -0,0 +1,401 @@
|
||||
"""The EnergyID integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import datetime as dt
|
||||
from datetime import timedelta
|
||||
import functools
|
||||
import logging
|
||||
|
||||
from aiohttp import ClientError, ClientResponseError
|
||||
from energyid_webhooks.client_v2 import WebhookClient
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.core import (
|
||||
CALLBACK_TYPE,
|
||||
Event,
|
||||
EventStateChangedData,
|
||||
HomeAssistant,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.event import (
|
||||
async_track_entity_registry_updated_event,
|
||||
async_track_state_change_event,
|
||||
async_track_time_interval,
|
||||
)
|
||||
|
||||
from .const import (
|
||||
CONF_DEVICE_ID,
|
||||
CONF_DEVICE_NAME,
|
||||
CONF_ENERGYID_KEY,
|
||||
CONF_HA_ENTITY_UUID,
|
||||
CONF_PROVISIONING_KEY,
|
||||
CONF_PROVISIONING_SECRET,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type EnergyIDConfigEntry = ConfigEntry[EnergyIDRuntimeData]
|
||||
|
||||
DEFAULT_UPLOAD_INTERVAL_SECONDS = 60
|
||||
|
||||
|
||||
@dataclass
|
||||
class EnergyIDRuntimeData:
|
||||
"""Runtime data for the EnergyID integration."""
|
||||
|
||||
client: WebhookClient
|
||||
mappings: dict[str, str]
|
||||
state_listener: CALLBACK_TYPE | None = None
|
||||
registry_tracking_listener: CALLBACK_TYPE | None = None
|
||||
unavailable_logged: bool = False
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: EnergyIDConfigEntry) -> bool:
|
||||
"""Set up EnergyID from a config entry."""
|
||||
session = async_get_clientsession(hass)
|
||||
client = WebhookClient(
|
||||
provisioning_key=entry.data[CONF_PROVISIONING_KEY],
|
||||
provisioning_secret=entry.data[CONF_PROVISIONING_SECRET],
|
||||
device_id=entry.data[CONF_DEVICE_ID],
|
||||
device_name=entry.data[CONF_DEVICE_NAME],
|
||||
session=session,
|
||||
)
|
||||
|
||||
entry.runtime_data = EnergyIDRuntimeData(
|
||||
client=client,
|
||||
mappings={},
|
||||
)
|
||||
|
||||
is_claimed = None
|
||||
try:
|
||||
is_claimed = await client.authenticate()
|
||||
except TimeoutError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
f"Timeout authenticating with EnergyID: {err}"
|
||||
) from err
|
||||
except ClientResponseError as err:
|
||||
# 401/403 = invalid credentials, trigger reauth
|
||||
if err.status in (401, 403):
|
||||
raise ConfigEntryAuthFailed(f"Invalid credentials: {err}") from err
|
||||
# Other HTTP errors are likely temporary
|
||||
raise ConfigEntryNotReady(
|
||||
f"HTTP error authenticating with EnergyID: {err}"
|
||||
) from err
|
||||
except ClientError as err:
|
||||
# Network/connection errors are temporary
|
||||
raise ConfigEntryNotReady(
|
||||
f"Connection error authenticating with EnergyID: {err}"
|
||||
) from err
|
||||
except Exception as err:
|
||||
# Unknown errors - log and retry (safer than forcing reauth)
|
||||
_LOGGER.exception("Unexpected error during EnergyID authentication")
|
||||
raise ConfigEntryNotReady(
|
||||
f"Unexpected error authenticating with EnergyID: {err}"
|
||||
) from err
|
||||
|
||||
if not is_claimed:
|
||||
# Device exists but not claimed = user needs to claim it = auth issue
|
||||
raise ConfigEntryAuthFailed("Device is not claimed. Please re-authenticate.")
|
||||
|
||||
_LOGGER.debug("EnergyID device '%s' authenticated successfully", client.device_name)
|
||||
|
||||
async def _async_synchronize_sensors(now: dt.datetime | None = None) -> None:
|
||||
"""Callback for periodically synchronizing sensor data."""
|
||||
try:
|
||||
await client.synchronize_sensors()
|
||||
if entry.runtime_data.unavailable_logged:
|
||||
_LOGGER.debug("Connection to EnergyID re-established")
|
||||
entry.runtime_data.unavailable_logged = False
|
||||
except (OSError, RuntimeError) as err:
|
||||
if not entry.runtime_data.unavailable_logged:
|
||||
_LOGGER.debug("EnergyID is unavailable: %s", err)
|
||||
entry.runtime_data.unavailable_logged = True
|
||||
|
||||
upload_interval = DEFAULT_UPLOAD_INTERVAL_SECONDS
|
||||
if client.webhook_policy:
|
||||
upload_interval = client.webhook_policy.get(
|
||||
"uploadInterval", DEFAULT_UPLOAD_INTERVAL_SECONDS
|
||||
)
|
||||
|
||||
# Schedule the callback and automatically unsubscribe when the entry is unloaded.
|
||||
entry.async_on_unload(
|
||||
async_track_time_interval(
|
||||
hass, _async_synchronize_sensors, timedelta(seconds=upload_interval)
|
||||
)
|
||||
)
|
||||
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
|
||||
|
||||
update_listeners(hass, entry)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Starting EnergyID background sync for '%s'",
|
||||
client.device_name,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def config_entry_update_listener(
|
||||
hass: HomeAssistant, entry: EnergyIDConfigEntry
|
||||
) -> None:
|
||||
"""Handle config entry updates, including subentry changes."""
|
||||
_LOGGER.debug("Config entry updated for %s, reloading listeners", entry.entry_id)
|
||||
update_listeners(hass, entry)
|
||||
|
||||
|
||||
@callback
|
||||
def update_listeners(hass: HomeAssistant, entry: EnergyIDConfigEntry) -> None:
|
||||
"""Set up or update state listeners and queue initial states."""
|
||||
runtime_data = entry.runtime_data
|
||||
client = runtime_data.client
|
||||
|
||||
# Clean up old state listener
|
||||
if runtime_data.state_listener:
|
||||
runtime_data.state_listener()
|
||||
runtime_data.state_listener = None
|
||||
|
||||
mappings: dict[str, str] = {}
|
||||
entities_to_track: list[str] = []
|
||||
old_mappings = set(runtime_data.mappings.keys())
|
||||
new_mappings = set()
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
subentries = list(entry.subentries.values())
|
||||
_LOGGER.debug(
|
||||
"Found %d subentries in entry.subentries: %s",
|
||||
len(subentries),
|
||||
[s.data for s in subentries],
|
||||
)
|
||||
|
||||
# Build current entity mappings
|
||||
tracked_entity_ids = []
|
||||
for subentry in subentries:
|
||||
entity_uuid = subentry.data.get(CONF_HA_ENTITY_UUID)
|
||||
energyid_key = subentry.data.get(CONF_ENERGYID_KEY)
|
||||
|
||||
if not (entity_uuid and energyid_key):
|
||||
continue
|
||||
|
||||
entity_entry = ent_reg.async_get(entity_uuid)
|
||||
if not entity_entry:
|
||||
_LOGGER.warning(
|
||||
"Entity with UUID %s does not exist, skipping mapping to %s",
|
||||
entity_uuid,
|
||||
energyid_key,
|
||||
)
|
||||
continue
|
||||
|
||||
ha_entity_id = entity_entry.entity_id
|
||||
tracked_entity_ids.append(ha_entity_id)
|
||||
|
||||
if not hass.states.get(ha_entity_id):
|
||||
# Entity exists in registry but is not present in the state machine
|
||||
_LOGGER.debug(
|
||||
"Entity %s does not exist in state machine yet, will track when available (mapping to %s)",
|
||||
ha_entity_id,
|
||||
energyid_key,
|
||||
)
|
||||
# Still add to entities_to_track so we can handle it when state appears
|
||||
entities_to_track.append(ha_entity_id)
|
||||
continue
|
||||
|
||||
mappings[ha_entity_id] = energyid_key
|
||||
entities_to_track.append(ha_entity_id)
|
||||
new_mappings.add(ha_entity_id)
|
||||
client.get_or_create_sensor(energyid_key)
|
||||
|
||||
if ha_entity_id not in old_mappings:
|
||||
_LOGGER.debug(
|
||||
"New mapping detected for %s, queuing initial state", ha_entity_id
|
||||
)
|
||||
if (
|
||||
current_state := hass.states.get(ha_entity_id)
|
||||
) and current_state.state not in (
|
||||
STATE_UNKNOWN,
|
||||
STATE_UNAVAILABLE,
|
||||
):
|
||||
try:
|
||||
value = float(current_state.state)
|
||||
timestamp = current_state.last_updated or dt.datetime.now(dt.UTC)
|
||||
client.get_or_create_sensor(energyid_key).update(value, timestamp)
|
||||
except (ValueError, TypeError):
|
||||
_LOGGER.debug(
|
||||
"Could not convert initial state of %s to float: %s",
|
||||
ha_entity_id,
|
||||
current_state.state,
|
||||
)
|
||||
|
||||
# Clean up old entity registry listener
|
||||
if runtime_data.registry_tracking_listener:
|
||||
runtime_data.registry_tracking_listener()
|
||||
runtime_data.registry_tracking_listener = None
|
||||
|
||||
# Set up listeners for entity registry changes
|
||||
if tracked_entity_ids:
|
||||
_LOGGER.debug("Setting up entity registry tracking for: %s", tracked_entity_ids)
|
||||
|
||||
def _handle_entity_registry_change(
|
||||
event: Event[er.EventEntityRegistryUpdatedData],
|
||||
) -> None:
|
||||
"""Handle entity registry changes for our tracked entities."""
|
||||
_LOGGER.debug("Registry event for tracked entity: %s", event.data)
|
||||
|
||||
if event.data["action"] == "update":
|
||||
# Type is now narrowed to _EventEntityRegistryUpdatedData_Update
|
||||
if "entity_id" in event.data["changes"]:
|
||||
old_entity_id = event.data["changes"]["entity_id"]
|
||||
new_entity_id = event.data["entity_id"]
|
||||
|
||||
_LOGGER.debug(
|
||||
"Tracked entity ID changed: %s -> %s",
|
||||
old_entity_id,
|
||||
new_entity_id,
|
||||
)
|
||||
# Entity ID changed, need to reload listeners to track new ID
|
||||
update_listeners(hass, entry)
|
||||
|
||||
elif event.data["action"] == "remove":
|
||||
_LOGGER.debug("Tracked entity removed: %s", event.data["entity_id"])
|
||||
# reminder: Create repair issue to notify user about removed entity
|
||||
update_listeners(hass, entry)
|
||||
|
||||
# Track the specific entity IDs we care about
|
||||
unsub_entity_registry = async_track_entity_registry_updated_event(
|
||||
hass, tracked_entity_ids, _handle_entity_registry_change
|
||||
)
|
||||
runtime_data.registry_tracking_listener = unsub_entity_registry
|
||||
|
||||
if removed_mappings := old_mappings - new_mappings:
|
||||
_LOGGER.debug("Removed mappings: %s", ", ".join(removed_mappings))
|
||||
|
||||
runtime_data.mappings = mappings
|
||||
|
||||
if not entities_to_track:
|
||||
_LOGGER.debug(
|
||||
"No valid sensor mappings configured for '%s'", client.device_name
|
||||
)
|
||||
return
|
||||
|
||||
unsub_state_change = async_track_state_change_event(
|
||||
hass,
|
||||
entities_to_track,
|
||||
functools.partial(_async_handle_state_change, hass, entry.entry_id),
|
||||
)
|
||||
runtime_data.state_listener = unsub_state_change
|
||||
|
||||
_LOGGER.debug(
|
||||
"Now tracking state changes for %d entities for '%s': %s",
|
||||
len(entities_to_track),
|
||||
client.device_name,
|
||||
entities_to_track,
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def _async_handle_state_change(
|
||||
hass: HomeAssistant, entry_id: str, event: Event[EventStateChangedData]
|
||||
) -> None:
|
||||
"""Handle state changes for tracked entities."""
|
||||
entity_id = event.data["entity_id"]
|
||||
new_state = event.data["new_state"]
|
||||
|
||||
_LOGGER.debug(
|
||||
"State change detected for entity: %s, new value: %s",
|
||||
entity_id,
|
||||
new_state.state if new_state else "None",
|
||||
)
|
||||
|
||||
if not new_state or new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
|
||||
return
|
||||
|
||||
entry = hass.config_entries.async_get_entry(entry_id)
|
||||
if not entry or not hasattr(entry, "runtime_data"):
|
||||
# Entry is being unloaded or not yet fully initialized
|
||||
return
|
||||
|
||||
runtime_data = entry.runtime_data
|
||||
client = runtime_data.client
|
||||
|
||||
# Check if entity is already mapped
|
||||
if energyid_key := runtime_data.mappings.get(entity_id):
|
||||
# Entity already mapped, just update value
|
||||
_LOGGER.debug(
|
||||
"Updating EnergyID sensor %s with value %s", energyid_key, new_state.state
|
||||
)
|
||||
else:
|
||||
# Entity not mapped yet - check if it should be (handles late-appearing entities)
|
||||
ent_reg = er.async_get(hass)
|
||||
for subentry in entry.subentries.values():
|
||||
entity_uuid = subentry.data.get(CONF_HA_ENTITY_UUID)
|
||||
energyid_key_candidate = subentry.data.get(CONF_ENERGYID_KEY)
|
||||
|
||||
if not (entity_uuid and energyid_key_candidate):
|
||||
continue
|
||||
|
||||
entity_entry = ent_reg.async_get(entity_uuid)
|
||||
if entity_entry and entity_entry.entity_id == entity_id:
|
||||
# Found it! Add to mappings and send initial value
|
||||
energyid_key = energyid_key_candidate
|
||||
runtime_data.mappings[entity_id] = energyid_key
|
||||
client.get_or_create_sensor(energyid_key)
|
||||
_LOGGER.debug(
|
||||
"Entity %s now available in state machine, adding to mappings (key: %s)",
|
||||
entity_id,
|
||||
energyid_key,
|
||||
)
|
||||
break
|
||||
else:
|
||||
# Not a tracked entity, ignore
|
||||
return
|
||||
|
||||
try:
|
||||
value = float(new_state.state)
|
||||
except (ValueError, TypeError):
|
||||
return
|
||||
|
||||
client.get_or_create_sensor(energyid_key).update(value, new_state.last_updated)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: EnergyIDConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
_LOGGER.debug("Unloading EnergyID entry for %s", entry.title)
|
||||
|
||||
try:
|
||||
# Unload subentries if present (guarded for test and reload scenarios)
|
||||
if hasattr(hass.config_entries, "async_entries") and hasattr(entry, "entry_id"):
|
||||
subentries = [
|
||||
e.entry_id
|
||||
for e in hass.config_entries.async_entries(DOMAIN)
|
||||
if getattr(e, "parent_entry", None) == entry.entry_id
|
||||
]
|
||||
for subentry_id in subentries:
|
||||
await hass.config_entries.async_unload(subentry_id)
|
||||
|
||||
# Only clean up listeners and client if runtime_data is present
|
||||
if hasattr(entry, "runtime_data"):
|
||||
runtime_data = entry.runtime_data
|
||||
|
||||
# Remove state listener
|
||||
if runtime_data.state_listener:
|
||||
runtime_data.state_listener()
|
||||
|
||||
# Remove registry tracking listener
|
||||
if runtime_data.registry_tracking_listener:
|
||||
runtime_data.registry_tracking_listener()
|
||||
|
||||
try:
|
||||
await runtime_data.client.close()
|
||||
except Exception:
|
||||
_LOGGER.exception("Error closing EnergyID client for %s", entry.title)
|
||||
del entry.runtime_data
|
||||
except Exception:
|
||||
_LOGGER.exception("Error during async_unload_entry for %s", entry.title)
|
||||
return False
|
||||
return True
|
||||
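The EnergyID setup above subscribes a single callback for every tracked entity, binds the config entry id with functools.partial, and then looks the entity up in a mapping when an event arrives. A stripped-down sketch of that dispatch pattern, with a toy event dictionary standing in for async_track_state_change_event and its Event objects (MAPPINGS, UPLOADED and handle_state_change are illustrative names):

import functools
from collections.abc import Callable

# entity_id -> EnergyID sensor key, as built by update_listeners() above.
MAPPINGS = {"sensor.grid_power": "grid_power"}
UPLOADED: list[tuple[str, float]] = []


def handle_state_change(entry_id: str, event: dict) -> None:
    """One callback serves every tracked entity; the event names the entity."""
    entity_id = event["entity_id"]
    if (key := MAPPINGS.get(entity_id)) is None:
        return  # not a tracked entity
    try:
        value = float(event["new_state"])
    except (TypeError, ValueError):
        return  # non-numeric states are ignored, as in the integration
    UPLOADED.append((key, value))


# Binding the entry id up front mirrors functools.partial(_async_handle_state_change, hass, entry.entry_id).
listener: Callable[[dict], None] = functools.partial(handle_state_change, "entry-123")

listener({"entity_id": "sensor.grid_power", "new_state": "1234.5"})
listener({"entity_id": "sensor.unrelated", "new_state": "7"})
listener({"entity_id": "sensor.grid_power", "new_state": "unknown"})

assert UPLOADED == [("grid_power", 1234.5)]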
homeassistant/components/energyid/config_flow.py (new file, 293 lines)
@@ -0,0 +1,293 @@
|
||||
"""Config flow for EnergyID integration."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientError, ClientResponseError
|
||||
from energyid_webhooks.client_v2 import WebhookClient
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentryFlow,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.instance_id import async_get as async_get_instance_id
|
||||
|
||||
from .const import (
|
||||
CONF_DEVICE_ID,
|
||||
CONF_DEVICE_NAME,
|
||||
CONF_PROVISIONING_KEY,
|
||||
CONF_PROVISIONING_SECRET,
|
||||
DOMAIN,
|
||||
ENERGYID_DEVICE_ID_FOR_WEBHOOK_PREFIX,
|
||||
MAX_POLLING_ATTEMPTS,
|
||||
NAME,
|
||||
POLLING_INTERVAL,
|
||||
)
|
||||
from .energyid_sensor_mapping_flow import EnergyIDSensorMappingFlowHandler
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EnergyIDConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle the configuration flow for the EnergyID integration."""
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Initialize the config flow."""
|
||||
self._flow_data: dict[str, Any] = {}
|
||||
self._polling_task: asyncio.Task | None = None
|
||||
|
||||
async def _perform_auth_and_get_details(self) -> str | None:
|
||||
"""Authenticate with EnergyID and retrieve device details."""
|
||||
_LOGGER.debug("Starting authentication with EnergyID")
|
||||
client = WebhookClient(
|
||||
provisioning_key=self._flow_data[CONF_PROVISIONING_KEY],
|
||||
provisioning_secret=self._flow_data[CONF_PROVISIONING_SECRET],
|
||||
device_id=self._flow_data[CONF_DEVICE_ID],
|
||||
device_name=self._flow_data[CONF_DEVICE_NAME],
|
||||
session=async_get_clientsession(self.hass),
|
||||
)
|
||||
try:
|
||||
is_claimed = await client.authenticate()
|
||||
except ClientResponseError as err:
|
||||
if err.status == 401:
|
||||
_LOGGER.debug("Invalid provisioning key or secret")
|
||||
return "invalid_auth"
|
||||
_LOGGER.debug(
|
||||
"Client response error during EnergyID authentication: %s", err
|
||||
)
|
||||
return "cannot_connect"
|
||||
except ClientError as err:
|
||||
_LOGGER.debug(
|
||||
"Failed to connect to EnergyID during authentication: %s", err
|
||||
)
|
||||
return "cannot_connect"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected error during EnergyID authentication")
|
||||
return "unknown_auth_error"
|
||||
else:
|
||||
_LOGGER.debug("Authentication successful, claimed: %s", is_claimed)
|
||||
|
||||
if is_claimed:
|
||||
self._flow_data["record_number"] = client.recordNumber
|
||||
self._flow_data["record_name"] = client.recordName
|
||||
_LOGGER.debug(
|
||||
"Device claimed with record number: %s, record name: %s",
|
||||
client.recordNumber,
|
||||
client.recordName,
|
||||
)
|
||||
return None
|
||||
|
||||
self._flow_data["claim_info"] = client.get_claim_info()
|
||||
self._flow_data["claim_info"]["integration_name"] = NAME
|
||||
_LOGGER.debug(
|
||||
"Device needs claim, claim info: %s", self._flow_data["claim_info"]
|
||||
)
|
||||
return "needs_claim"
|
||||
|
||||
async def _async_poll_for_claim(self) -> None:
|
||||
"""Poll EnergyID to check if device has been claimed."""
|
||||
for _attempt in range(1, MAX_POLLING_ATTEMPTS + 1):
|
||||
await asyncio.sleep(POLLING_INTERVAL)
|
||||
|
||||
auth_status = await self._perform_auth_and_get_details()
|
||||
|
||||
if auth_status is None:
|
||||
# Device claimed - advance flow to async_step_create_entry
|
||||
_LOGGER.debug("Device claimed, advancing to create entry")
|
||||
self.hass.async_create_task(
|
||||
self.hass.config_entries.flow.async_configure(self.flow_id)
|
||||
)
|
||||
return
|
||||
|
||||
if auth_status != "needs_claim":
|
||||
# Stop polling on non-transient errors
|
||||
# No user notification needed here as the error will be handled
|
||||
# in the next flow step when the user continues the flow
|
||||
_LOGGER.debug("Polling stopped due to error: %s", auth_status)
|
||||
return
|
||||
|
||||
_LOGGER.debug("Polling timeout after %s attempts", MAX_POLLING_ATTEMPTS)
|
||||
# No user notification here because:
|
||||
# 1. User may still be completing the claim process in EnergyID portal
|
||||
# 2. Immediate notification could interrupt their workflow or cause confusion
|
||||
# 3. When user clicks "Submit" to continue, the flow validates claim status
|
||||
# and will show appropriate error/success messages based on current state
|
||||
# 4. Timeout allows graceful fallback: user can retry claim or see proper error
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step of the configuration flow."""
|
||||
_LOGGER.debug("Starting user step with input: %s", user_input)
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
instance_id = await async_get_instance_id(self.hass)
|
||||
# Note: This device_id is for EnergyID's webhook system, not related to HA's device registry
|
||||
device_suffix = f"{int(asyncio.get_event_loop().time() * 1000)}"
|
||||
device_id = (
|
||||
f"{ENERGYID_DEVICE_ID_FOR_WEBHOOK_PREFIX}{instance_id}_{device_suffix}"
|
||||
)
|
||||
self._flow_data = {
|
||||
**user_input,
|
||||
CONF_DEVICE_ID: device_id,
|
||||
CONF_DEVICE_NAME: self.hass.config.location_name,
|
||||
}
|
||||
_LOGGER.debug("Flow data after user input: %s", self._flow_data)
|
||||
|
||||
auth_status = await self._perform_auth_and_get_details()
|
||||
|
||||
if auth_status is None:
|
||||
await self.async_set_unique_id(device_id)
|
||||
self._abort_if_unique_id_configured()
|
||||
_LOGGER.debug(
|
||||
"Creating entry with title: %s", self._flow_data["record_name"]
|
||||
)
|
||||
return self.async_create_entry(
|
||||
title=self._flow_data["record_name"],
|
||||
data=self._flow_data,
|
||||
description="add_sensor_mapping_hint",
|
||||
description_placeholders={"integration_name": NAME},
|
||||
)
|
||||
|
||||
if auth_status == "needs_claim":
|
||||
_LOGGER.debug("Redirecting to auth and claim step")
|
||||
return await self.async_step_auth_and_claim()
|
||||
|
||||
errors["base"] = auth_status
|
||||
_LOGGER.debug("Errors encountered during user step: %s", errors)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PROVISIONING_KEY): str,
|
||||
vol.Required(CONF_PROVISIONING_SECRET): cv.string,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders={
|
||||
"docs_url": "https://app.energyid.eu/integrations/home-assistant",
|
||||
"integration_name": NAME,
|
||||
},
|
||||
)
|
||||
|
||||
async def async_step_auth_and_claim(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the step for device claiming using external step with polling."""
|
||||
_LOGGER.debug("Starting auth and claim step with input: %s", user_input)
|
||||
|
||||
claim_info = self._flow_data.get("claim_info", {})
|
||||
|
||||
# Start polling when we first enter this step
|
||||
if self._polling_task is None:
|
||||
self._polling_task = self.hass.async_create_task(
|
||||
self._async_poll_for_claim()
|
||||
)
|
||||
|
||||
# Show external step to open the EnergyID website
|
||||
return self.async_external_step(
|
||||
step_id="auth_and_claim",
|
||||
url=claim_info.get("claim_url", ""),
|
||||
description_placeholders=claim_info,
|
||||
)
|
||||
|
||||
# Check if device has been claimed
|
||||
auth_status = await self._perform_auth_and_get_details()
|
||||
|
||||
if auth_status is None:
|
||||
# Device has been claimed
|
||||
if self._polling_task and not self._polling_task.done():
|
||||
self._polling_task.cancel()
|
||||
self._polling_task = None
|
||||
return self.async_external_step_done(next_step_id="create_entry")
|
||||
|
||||
# Device not claimed yet, show the external step again
|
||||
if self._polling_task and not self._polling_task.done():
|
||||
self._polling_task.cancel()
|
||||
self._polling_task = None
|
||||
return self.async_external_step(
|
||||
step_id="auth_and_claim",
|
||||
url=claim_info.get("claim_url", ""),
|
||||
description_placeholders=claim_info,
|
||||
)
|
||||
|
||||
async def async_step_create_entry(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Final step to create the entry after successful claim."""
|
||||
_LOGGER.debug("Creating entry with title: %s", self._flow_data["record_name"])
|
||||
return self.async_create_entry(
|
||||
title=self._flow_data["record_name"],
|
||||
data=self._flow_data,
|
||||
description="add_sensor_mapping_hint",
|
||||
description_placeholders={"integration_name": NAME},
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Perform reauthentication upon an API authentication error."""
|
||||
# Note: This device_id is for EnergyID's webhook system, not related to HA's device registry
|
||||
self._flow_data = {
|
||||
CONF_DEVICE_ID: entry_data[CONF_DEVICE_ID],
|
||||
CONF_DEVICE_NAME: entry_data[CONF_DEVICE_NAME],
|
||||
}
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm reauthentication dialog."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._flow_data.update(user_input)
|
||||
auth_status = await self._perform_auth_and_get_details()
|
||||
|
||||
if auth_status is None:
|
||||
# Authentication successful and claimed
|
||||
await self.async_set_unique_id(self._flow_data["record_number"])
|
||||
self._abort_if_unique_id_mismatch(reason="wrong_account")
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(),
|
||||
data_updates={
|
||||
CONF_PROVISIONING_KEY: user_input[CONF_PROVISIONING_KEY],
|
||||
CONF_PROVISIONING_SECRET: user_input[CONF_PROVISIONING_SECRET],
|
||||
},
|
||||
)
|
||||
|
||||
if auth_status == "needs_claim":
|
||||
return await self.async_step_auth_and_claim()
|
||||
|
||||
errors["base"] = auth_status
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PROVISIONING_KEY): str,
|
||||
vol.Required(CONF_PROVISIONING_SECRET): cv.string,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders={
|
||||
"docs_url": "https://app.energyid.eu/integrations/home-assistant",
|
||||
"integration_name": NAME,
|
||||
},
|
||||
)
|
||||
|
||||
@classmethod
|
||||
@callback
|
||||
def async_get_supported_subentry_types(
|
||||
cls, config_entry: ConfigEntry
|
||||
) -> dict[str, type[ConfigSubentryFlow]]:
|
||||
"""Return subentries supported by this integration."""
|
||||
return {"sensor_mapping": EnergyIDSensorMappingFlowHandler}
|
||||
homeassistant/components/energyid/const.py (new file, 21 lines)
@@ -0,0 +1,21 @@
"""Constants for the EnergyID integration."""

from typing import Final

DOMAIN: Final = "energyid"
NAME: Final = "EnergyID"

# --- Config Flow and Entry Data ---
CONF_PROVISIONING_KEY: Final = "provisioning_key"
CONF_PROVISIONING_SECRET: Final = "provisioning_secret"
CONF_DEVICE_ID: Final = "device_id"
CONF_DEVICE_NAME: Final = "device_name"

# --- Subentry (Mapping) Data ---
CONF_HA_ENTITY_UUID: Final = "ha_entity_uuid"
CONF_ENERGYID_KEY: Final = "energyid_key"

# --- Webhook and Polling Configuration ---
ENERGYID_DEVICE_ID_FOR_WEBHOOK_PREFIX: Final = "homeassistant_eid_"
POLLING_INTERVAL: Final = 2  # seconds
MAX_POLLING_ATTEMPTS: Final = 60  # 2 minutes total
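POLLING_INTERVAL and MAX_POLLING_ATTEMPTS above give the claim flow roughly a two-minute budget (60 attempts, 2 seconds apart). A minimal asyncio sketch of that poll-until-claimed loop, with a fake check standing in for the real authentication call (poll_until_claimed and fake_is_claimed are illustrative, not the integration's actual helpers):

import asyncio

POLLING_INTERVAL = 2  # seconds, matching const.py above
MAX_POLLING_ATTEMPTS = 60  # 60 * 2 s = 2 minutes total


async def poll_until_claimed(is_claimed, interval: float = POLLING_INTERVAL) -> bool:
    """Return True as soon as the device reports claimed, False on timeout."""
    for _ in range(MAX_POLLING_ATTEMPTS):
        await asyncio.sleep(interval)
        if await is_claimed():
            return True
    return False


async def main() -> None:
    attempts = 0

    async def fake_is_claimed() -> bool:
        nonlocal attempts
        attempts += 1
        return attempts >= 3  # pretend the user finishes claiming on the third check

    # Interval shortened so the example runs instantly; the flow itself waits 2 s per attempt.
    assert await poll_until_claimed(fake_is_claimed, interval=0) is True
    assert attempts == 3


asyncio.run(main())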
@@ -0,0 +1,156 @@
|
||||
"""Subentry flow for EnergyID integration, handling sensor mapping management."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
|
||||
from homeassistant.config_entries import ConfigSubentryFlow, SubentryFlowResult
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.selector import EntitySelector, EntitySelectorConfig
|
||||
|
||||
from .const import CONF_ENERGYID_KEY, CONF_HA_ENTITY_UUID, DOMAIN, NAME
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@callback
|
||||
def _get_suggested_entities(hass: HomeAssistant) -> list[str]:
|
||||
"""Return a sorted list of suggested sensor entity IDs for mapping."""
|
||||
ent_reg = er.async_get(hass)
|
||||
suitable_entities = []
|
||||
|
||||
for entity_entry in ent_reg.entities.values():
|
||||
if not (
|
||||
entity_entry.domain == Platform.SENSOR and entity_entry.platform != DOMAIN
|
||||
):
|
||||
continue
|
||||
|
||||
if not hass.states.get(entity_entry.entity_id):
|
||||
continue
|
||||
|
||||
state_class = (entity_entry.capabilities or {}).get("state_class")
|
||||
has_numeric_indicators = (
|
||||
state_class
|
||||
in (
|
||||
SensorStateClass.MEASUREMENT,
|
||||
SensorStateClass.TOTAL,
|
||||
SensorStateClass.TOTAL_INCREASING,
|
||||
)
|
||||
or entity_entry.device_class
|
||||
in (
|
||||
SensorDeviceClass.ENERGY,
|
||||
SensorDeviceClass.GAS,
|
||||
SensorDeviceClass.POWER,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
SensorDeviceClass.VOLUME,
|
||||
)
|
||||
or entity_entry.original_device_class
|
||||
in (
|
||||
SensorDeviceClass.ENERGY,
|
||||
SensorDeviceClass.GAS,
|
||||
SensorDeviceClass.POWER,
|
||||
SensorDeviceClass.TEMPERATURE,
|
||||
SensorDeviceClass.VOLUME,
|
||||
)
|
||||
)
|
||||
|
||||
if has_numeric_indicators:
|
||||
suitable_entities.append(entity_entry.entity_id)
|
||||
|
||||
return sorted(suitable_entities)
|
||||
|
||||
|
||||
@callback
|
||||
def _validate_mapping_input(
|
||||
ha_entity_id: str | None,
|
||||
current_mappings: set[str],
|
||||
ent_reg: er.EntityRegistry,
|
||||
) -> dict[str, str]:
|
||||
"""Validate mapping input and return errors if any."""
|
||||
errors: dict[str, str] = {}
|
||||
if not ha_entity_id:
|
||||
errors["base"] = "entity_required"
|
||||
return errors
|
||||
|
||||
# Check if entity exists
|
||||
entity_entry = ent_reg.async_get(ha_entity_id)
|
||||
if not entity_entry:
|
||||
errors["base"] = "entity_not_found"
|
||||
return errors
|
||||
|
||||
# Check if entity is already mapped (by UUID)
|
||||
entity_uuid = entity_entry.id
|
||||
if entity_uuid in current_mappings:
|
||||
errors["base"] = "entity_already_mapped"
|
||||
|
||||
return errors
|
||||
|
||||
|
||||
class EnergyIDSensorMappingFlowHandler(ConfigSubentryFlow):
|
||||
"""Handle EnergyID sensor mapping subentry flow for adding new mappings."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> SubentryFlowResult:
|
||||
"""Handle the user step for adding a new sensor mapping."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
config_entry = self._get_entry()
|
||||
ent_reg = er.async_get(self.hass)
|
||||
|
||||
if user_input is not None:
|
||||
ha_entity_id = user_input.get("ha_entity_id")
|
||||
|
||||
# Get current mappings by UUID
|
||||
current_mappings = {
|
||||
uuid
|
||||
for sub in config_entry.subentries.values()
|
||||
if (uuid := sub.data.get(CONF_HA_ENTITY_UUID)) is not None
|
||||
}
|
||||
|
||||
errors = _validate_mapping_input(ha_entity_id, current_mappings, ent_reg)
|
||||
|
||||
if not errors and ha_entity_id:
|
||||
# Get entity registry entry
|
||||
entity_entry = ent_reg.async_get(ha_entity_id)
|
||||
if entity_entry:
|
||||
energyid_key = ha_entity_id.split(".", 1)[-1]
|
||||
|
||||
subentry_data = {
|
||||
CONF_HA_ENTITY_UUID: entity_entry.id, # Store UUID only
|
||||
CONF_ENERGYID_KEY: energyid_key,
|
||||
}
|
||||
|
||||
title = f"{ha_entity_id.split('.', 1)[-1]} connection to {NAME}"
|
||||
_LOGGER.debug(
|
||||
"Creating subentry with title='%s', data=%s",
|
||||
title,
|
||||
subentry_data,
|
||||
)
|
||||
_LOGGER.debug("Parent config entry ID: %s", config_entry.entry_id)
|
||||
_LOGGER.debug(
|
||||
"Creating subentry with parent: %s", self._get_entry().entry_id
|
||||
)
|
||||
return self.async_create_entry(title=title, data=subentry_data)
|
||||
errors["base"] = "entity_not_found"
|
||||
|
||||
suggested_entities = _get_suggested_entities(self.hass)
|
||||
|
||||
data_schema = vol.Schema(
|
||||
{
|
||||
vol.Required("ha_entity_id"): EntitySelector(
|
||||
EntitySelectorConfig(include_entities=suggested_entities)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=data_schema,
|
||||
errors=errors,
|
||||
description_placeholders={"integration_name": NAME},
|
||||
)
|
||||
homeassistant/components/energyid/manifest.json (new file, 12 lines)
@@ -0,0 +1,12 @@
|
||||
{
  "domain": "energyid",
  "name": "EnergyID",
  "codeowners": ["@JrtPec", "@Molier"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/energyid",
  "integration_type": "service",
  "iot_class": "cloud_push",
  "loggers": ["energyid_webhooks"],
  "quality_scale": "silver",
  "requirements": ["energyid-webhooks==0.0.14"]
}
137
homeassistant/components/energyid/quality_scale.yaml
Normal file
@@ -0,0 +1,137 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: The integration does not expose any custom service actions.
  appropriate-polling:
    status: exempt
    comment: The integration uses a push-based mechanism with a background sync task, not polling.
  brands:
    status: done
  common-modules:
    status: done
  config-flow-test-coverage:
    status: done
  config-flow:
    status: done
  dependency-transparency:
    status: done
  docs-actions:
    status: exempt
    comment: The integration does not expose any custom service actions.
  docs-high-level-description:
    status: done
  docs-installation-instructions:
    status: done
  docs-removal-instructions:
    status: done
  entity-event-setup:
    status: exempt
    comment: This integration does not create its own entities.
  entity-unique-id:
    status: exempt
    comment: This integration does not create its own entities.
  has-entity-name:
    status: exempt
    comment: This integration does not create its own entities.
  runtime-data:
    status: done
  test-before-configure:
    status: done
  test-before-setup:
    status: done
  unique-config-entry:
    status: done

  # Silver
  action-exceptions:
    status: exempt
    comment: The integration does not expose any custom service actions.
  config-entry-unloading:
    status: done
  docs-configuration-parameters:
    status: done
  docs-installation-parameters:
    status: done
  entity-unavailable:
    status: exempt
    comment: This integration does not create its own entities.
  integration-owner:
    status: done
  log-when-unavailable:
    status: done
    comment: The integration logs a single message when the EnergyID service is unavailable.
  parallel-updates:
    status: exempt
    comment: This integration does not create its own entities.
  reauthentication-flow:
    status: done
  test-coverage:
    status: done

  # Gold
  devices:
    status: exempt
    comment: The integration does not create any entities, nor does it create devices.
  diagnostics:
    status: todo
    comment: Diagnostics will be added in a follow-up PR to help with debugging.
  discovery:
    status: exempt
    comment: Configuration requires manual entry of provisioning credentials.
  discovery-update-info:
    status: exempt
    comment: No discovery mechanism is used.
  docs-data-update:
    status: done
  docs-examples:
    status: done
  docs-known-limitations:
    status: done
  docs-supported-devices:
    status: exempt
    comment: This is a service integration not tied to specific device models.
  docs-supported-functions:
    status: done
  docs-troubleshooting:
    status: done
  docs-use-cases:
    status: done
  dynamic-devices:
    status: exempt
    comment: The integration creates a single device entry for the service connection.
  entity-category:
    status: exempt
    comment: This integration does not create its own entities.
  entity-device-class:
    status: exempt
    comment: This integration does not create its own entities.
  entity-disabled-by-default:
    status: exempt
    comment: This integration does not create its own entities.
  entity-translations:
    status: exempt
    comment: This integration does not create its own entities.
  exception-translations:
    status: done
  icon-translations:
    status: exempt
    comment: This integration does not create its own entities.
  reconfiguration-flow:
    status: todo
    comment: Reconfiguration will be added in a follow-up PR to allow updating the device name.
  repair-issues:
    status: exempt
    comment: Authentication issues are handled via the reauthentication flow.
  stale-devices:
    status: exempt
    comment: Creates a single service device entry tied to the config entry.

  # Platinum
  async-dependency:
    status: done
  inject-websession:
    status: done
  strict-typing:
    status: todo
    comment: Full strict typing compliance will be addressed in a future update.
71
homeassistant/components/energyid/strings.json
Normal file
@@ -0,0 +1,71 @@
{
  "config": {
    "abort": {
      "already_configured": "This device is already configured.",
      "reauth_successful": "Reauthentication successful."
    },
    "create_entry": {
      "add_sensor_mapping_hint": "You can now add mappings from any sensor in Home Assistant to {integration_name} using the '+ add sensor mapping' button."
    },
    "error": {
      "cannot_connect": "Failed to connect to {integration_name} API.",
      "claim_failed_or_timed_out": "Claiming the device failed or the code expired.",
      "invalid_auth": "Invalid provisioning key or secret.",
      "unknown_auth_error": "Unexpected error occurred during authentication."
    },
    "step": {
      "auth_and_claim": {
        "description": "This Home Assistant connection needs to be claimed in your {integration_name} portal before it can send data.\n\n1. Go to: {claim_url}\n2. Enter code: **{claim_code}**\n3. (Code expires: {valid_until})\n\nAfter successfully claiming the device in {integration_name}, select **Submit** below to continue.",
        "title": "Claim device in {integration_name}"
      },
      "reauth_confirm": {
        "data": {
          "provisioning_key": "[%key:component::energyid::config::step::user::data::provisioning_key%]",
          "provisioning_secret": "[%key:component::energyid::config::step::user::data::provisioning_secret%]"
        },
        "data_description": {
          "provisioning_key": "[%key:component::energyid::config::step::user::data_description::provisioning_key%]",
          "provisioning_secret": "[%key:component::energyid::config::step::user::data_description::provisioning_secret%]"
        },
        "description": "Please re-enter your {integration_name} provisioning key and secret to restore the connection.\n\nMore info: {docs_url}",
        "title": "Reauthenticate {integration_name}"
      },
      "user": {
        "data": {
          "provisioning_key": "Provisioning key",
          "provisioning_secret": "Provisioning secret"
        },
        "data_description": {
          "provisioning_key": "Your unique key for provisioning.",
          "provisioning_secret": "Your secret associated with the provisioning key."
        },
        "description": "Enter your {integration_name} webhook provisioning key and secret. Find these in your {integration_name} integration setup under provisioning credentials.\n\nMore info: {docs_url}",
        "title": "Connect to {integration_name}"
      }
    }
  },
  "config_subentries": {
    "sensor_mapping": {
      "entry_type": "service",
      "error": {
        "entity_already_mapped": "This Home Assistant entity is already mapped.",
        "entity_required": "You must select a sensor entity."
      },
      "initiate_flow": {
        "user": "Add sensor mapping"
      },
      "step": {
        "user": {
          "data": {
            "ha_entity_id": "Home Assistant sensor"
          },
          "data_description": {
            "ha_entity_id": "Select the sensor from Home Assistant to send to {integration_name}."
          },
          "description": "Select a Home Assistant sensor to send to {integration_name}. The sensor name will be used as the {integration_name} metric key.",
          "title": "Add sensor mapping"
        }
      }
    }
  }
}
@@ -7,7 +7,7 @@
  "iot_class": "local_polling",
  "loggers": ["pyenphase"],
  "quality_scale": "platinum",
  "requirements": ["pyenphase==2.4.0"],
  "requirements": ["pyenphase==2.4.2"],
  "zeroconf": [
    {
      "type": "_enphase-envoy._tcp.local."
@@ -25,6 +25,7 @@ from .domain_data import DomainData
from .encryption_key_storage import async_get_encryption_key_storage
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData
from .manager import DEVICE_CONFLICT_ISSUE_FORMAT, ESPHomeManager, cleanup_instance
from .websocket_api import async_setup as async_setup_websocket_api

_LOGGER = logging.getLogger(__name__)

@@ -38,6 +39,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
    ffmpeg_proxy.async_setup(hass)
    await assist_satellite.async_setup(hass)
    await dashboard.async_setup(hass)
    async_setup_websocket_api(hass)
    return True
@@ -17,7 +17,7 @@ DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False

DEFAULT_PORT: Final = 6053

STABLE_BLE_VERSION_STR = "2025.8.0"
STABLE_BLE_VERSION_STR = "2025.11.0"
STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
PROJECT_URLS = {
    "esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",
@@ -17,7 +17,7 @@
  "mqtt": ["esphome/discover/#"],
  "quality_scale": "platinum",
  "requirements": [
    "aioesphomeapi==42.8.0",
    "aioesphomeapi==42.10.0",
    "esphome-dashboard-api==1.3.0",
    "bleak-esphome==3.4.0"
  ],
@@ -157,7 +157,7 @@
      "title": "[%key:component::assist_pipeline::issues::assist_in_progress_deprecated::title%]"
    },
    "ble_firmware_outdated": {
      "description": "To improve Bluetooth reliability and performance, we highly recommend updating {name} with ESPHome {version} or later. When updating the device from ESPHome earlier than 2022.12.0, it is recommended to use a serial cable instead of an over-the-air update to take advantage of the new partition scheme.",
      "description": "ESPHome {version} introduces ultra-low latency event processing, reducing BLE event delays from 0-16 milliseconds to approximately 12 microseconds. This resolves stability issues when pairing, connecting, or handshaking with devices that require low latency, and makes Bluetooth proxy operations rival or exceed local adapters. We highly recommend updating {name} to take advantage of these improvements.",
      "title": "Update {name} with ESPHome {version} or later"
    },
    "device_conflict": {
52
homeassistant/components/esphome/websocket_api.py
Normal file
@@ -0,0 +1,52 @@
"""ESPHome websocket API."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .const import CONF_NOISE_PSK
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
TYPE = "type"
|
||||
ENTRY_ID = "entry_id"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Set up the websocket API."""
|
||||
websocket_api.async_register_command(hass, get_encryption_key)
|
||||
|
||||
|
||||
@callback
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required(TYPE): "esphome/get_encryption_key",
|
||||
vol.Required(ENTRY_ID): str,
|
||||
}
|
||||
)
|
||||
def get_encryption_key(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.connection.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Get the encryption key for an ESPHome config entry."""
|
||||
entry = hass.config_entries.async_get_entry(msg[ENTRY_ID])
|
||||
if entry is None:
|
||||
connection.send_error(
|
||||
msg["id"], websocket_api.ERR_NOT_FOUND, "Config entry not found"
|
||||
)
|
||||
return
|
||||
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"encryption_key": entry.data.get(CONF_NOISE_PSK),
|
||||
},
|
||||
)
|
||||
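Note: a command registered this way is normally exercised by sending the matching message type over an authenticated websocket connection. A minimal test-style sketch, assuming Home Assistant's standard hass_ws_client pytest fixture and an already-set-up config entry fixture (here called mock_config_entry; both fixture names are assumptions, not part of this PR):

async def test_get_encryption_key(hass, hass_ws_client, mock_config_entry) -> None:
    """Sketch: fetch the stored noise PSK for a config entry over the websocket API."""
    client = await hass_ws_client(hass)  # admin connection by default in tests
    await client.send_json(
        {
            "id": 1,
            "type": "esphome/get_encryption_key",
            "entry_id": mock_config_entry.entry_id,
        }
    )
    msg = await client.receive_json()
    assert msg["success"]
    assert "encryption_key" in msg["result"]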
@@ -102,6 +102,7 @@ SENSORS: tuple[EssentSensorEntityDescription, ...] = (
|
||||
key="average_today",
|
||||
translation_key="average_today",
|
||||
value_fn=lambda energy_data: energy_data.avg_price,
|
||||
energy_types=(EnergyType.ELECTRICITY,),
|
||||
),
|
||||
EssentSensorEntityDescription(
|
||||
key="lowest_price_today",
|
||||
|
||||
@@ -44,9 +44,6 @@
|
||||
"electricity_next_price": {
|
||||
"name": "Next electricity price"
|
||||
},
|
||||
"gas_average_today": {
|
||||
"name": "Average gas price today"
|
||||
},
|
||||
"gas_current_price": {
|
||||
"name": "Current gas price"
|
||||
},
|
||||
|
||||
@@ -167,7 +167,6 @@
|
||||
"triggers": {
|
||||
"turned_off": {
|
||||
"description": "Triggers when a fan is turned off.",
|
||||
"description_configured": "[%key:component::fan::triggers::turned_off::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::fan::common::trigger_behavior_description%]",
|
||||
@@ -178,7 +177,6 @@
|
||||
},
|
||||
"turned_on": {
|
||||
"description": "Triggers when a fan is turned on.",
|
||||
"description_configured": "[%key:component::fan::triggers::turned_on::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::fan::common::trigger_behavior_description%]",
|
||||
|
||||
@@ -1,22 +1,30 @@
|
||||
"""API for fitbit bound to Home Assistant OAuth."""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Awaitable, Callable
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from fitbit import Fitbit
|
||||
from fitbit.exceptions import HTTPException, HTTPUnauthorized
|
||||
from fitbit_web_api import ApiClient, Configuration, DevicesApi
|
||||
from fitbit_web_api.exceptions import (
|
||||
ApiException,
|
||||
OpenApiException,
|
||||
UnauthorizedException,
|
||||
)
|
||||
from fitbit_web_api.models.device import Device
|
||||
from requests.exceptions import ConnectionError as RequestsConnectionError
|
||||
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from .const import FitbitUnitSystem
|
||||
from .exceptions import FitbitApiException, FitbitAuthException
|
||||
from .model import FitbitDevice, FitbitProfile
|
||||
from .model import FitbitProfile
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -58,6 +66,14 @@ class FitbitApi(ABC):
|
||||
expires_at=float(token[CONF_EXPIRES_AT]),
|
||||
)
|
||||
|
||||
async def _async_get_fitbit_web_api(self) -> ApiClient:
|
||||
"""Create and return an ApiClient configured with the current access token."""
|
||||
token = await self.async_get_access_token()
|
||||
configuration = Configuration()
|
||||
configuration.pool_manager = async_get_clientsession(self._hass)
|
||||
configuration.access_token = token[CONF_ACCESS_TOKEN]
|
||||
return ApiClient(configuration)
|
||||
|
||||
async def async_get_user_profile(self) -> FitbitProfile:
|
||||
"""Return the user profile from the API."""
|
||||
if self._profile is None:
|
||||
@@ -94,21 +110,13 @@ class FitbitApi(ABC):
|
||||
return FitbitUnitSystem.METRIC
|
||||
return FitbitUnitSystem.EN_US
|
||||
|
||||
async def async_get_devices(self) -> list[FitbitDevice]:
|
||||
"""Return available devices."""
|
||||
client = await self._async_get_client()
|
||||
devices: list[dict[str, str]] = await self._run(client.get_devices)
|
||||
async def async_get_devices(self) -> list[Device]:
|
||||
"""Return available devices using fitbit-web-api."""
|
||||
client = await self._async_get_fitbit_web_api()
|
||||
devices_api = DevicesApi(client)
|
||||
devices: list[Device] = await self._run_async(devices_api.get_devices)
|
||||
_LOGGER.debug("get_devices=%s", devices)
|
||||
return [
|
||||
FitbitDevice(
|
||||
id=device["id"],
|
||||
device_version=device["deviceVersion"],
|
||||
battery_level=int(device["batteryLevel"]),
|
||||
battery=device["battery"],
|
||||
type=device["type"],
|
||||
)
|
||||
for device in devices
|
||||
]
|
||||
return devices
|
||||
|
||||
async def async_get_latest_time_series(self, resource_type: str) -> dict[str, Any]:
|
||||
"""Return the most recent value from the time series for the specified resource type."""
|
||||
@@ -140,6 +148,20 @@ class FitbitApi(ABC):
|
||||
_LOGGER.debug("Error from fitbit API: %s", err)
|
||||
raise FitbitApiException("Error from fitbit API") from err
|
||||
|
||||
async def _run_async[_T](self, func: Callable[[], Awaitable[_T]]) -> _T:
|
||||
"""Run client command."""
|
||||
try:
|
||||
return await func()
|
||||
except UnauthorizedException as err:
|
||||
_LOGGER.debug("Unauthorized error from fitbit API: %s", err)
|
||||
raise FitbitAuthException("Authentication error from fitbit API") from err
|
||||
except ApiException as err:
|
||||
_LOGGER.debug("Error from fitbit API: %s", err)
|
||||
raise FitbitApiException("Error from fitbit API") from err
|
||||
except OpenApiException as err:
|
||||
_LOGGER.debug("Error communicating with fitbit API: %s", err)
|
||||
raise FitbitApiException("Communication error from fitbit API") from err
|
||||
|
||||
|
||||
class OAuthFitbitApi(FitbitApi):
|
||||
"""Provide fitbit authentication tied to an OAuth2 based config entry."""
|
||||
|
||||
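Note: the _run_async helper above routes every fitbit-web-api call through one wrapper that maps library exceptions onto the integration's own exception types. A self-contained sketch of the same pattern, using invented exception names rather than the real fitbit ones:

import asyncio
from collections.abc import Awaitable, Callable


class UpstreamAuthError(Exception):
    """Invented stand-in for a library's 'unauthorized' exception."""


class UpstreamError(Exception):
    """Invented stand-in for a library's generic API exception."""


class MyAuthException(Exception):
    """Integration-facing authentication failure."""


class MyApiException(Exception):
    """Integration-facing API failure."""


async def run_guarded[_T](func: Callable[[], Awaitable[_T]]) -> _T:
    """Await one library call, translating its errors into integration exceptions."""
    try:
        return await func()
    except UpstreamAuthError as err:
        raise MyAuthException("Authentication error") from err
    except UpstreamError as err:
        raise MyApiException("API error") from err


async def _demo() -> None:
    async def fetch_devices() -> list[str]:
        return ["tracker-1"]  # pretend API payload

    print(await run_guarded(fetch_devices))


if __name__ == "__main__":
    asyncio.run(_demo())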
@@ -6,6 +6,8 @@ import datetime
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
from fitbit_web_api.models.device import Device
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
@@ -13,7 +15,6 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
|
||||
from .api import FitbitApi
|
||||
from .exceptions import FitbitApiException, FitbitAuthException
|
||||
from .model import FitbitDevice
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -23,7 +24,7 @@ TIMEOUT = 10
|
||||
type FitbitConfigEntry = ConfigEntry[FitbitData]
|
||||
|
||||
|
||||
class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
|
||||
class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, Device]]):
|
||||
"""Coordinator for fetching fitbit devices from the API."""
|
||||
|
||||
config_entry: FitbitConfigEntry
|
||||
@@ -41,7 +42,7 @@ class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
|
||||
)
|
||||
self._api = api
|
||||
|
||||
async def _async_update_data(self) -> dict[str, FitbitDevice]:
|
||||
async def _async_update_data(self) -> dict[str, Device]:
|
||||
"""Fetch data from API endpoint."""
|
||||
async with asyncio.timeout(TIMEOUT):
|
||||
try:
|
||||
@@ -50,7 +51,7 @@ class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
|
||||
raise ConfigEntryAuthFailed(err) from err
|
||||
except FitbitApiException as err:
|
||||
raise UpdateFailed(err) from err
|
||||
return {device.id: device for device in devices}
|
||||
return {device.id: device for device in devices if device.id is not None}
|
||||
|
||||
|
||||
@dataclass
|
||||
|
||||
@@ -6,6 +6,6 @@
|
||||
"dependencies": ["application_credentials", "http"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/fitbit",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["fitbit"],
|
||||
"requirements": ["fitbit==0.3.1"]
|
||||
"loggers": ["fitbit", "fitbit_web_api"],
|
||||
"requirements": ["fitbit==0.3.1", "fitbit-web-api==2.13.5"]
|
||||
}
|
||||
|
||||
@@ -21,26 +21,6 @@ class FitbitProfile:
|
||||
"""The locale defined in the user's Fitbit account settings."""
|
||||
|
||||
|
||||
@dataclass
|
||||
class FitbitDevice:
|
||||
"""Device from the Fitbit API response."""
|
||||
|
||||
id: str
|
||||
"""The device ID."""
|
||||
|
||||
device_version: str
|
||||
"""The product name of the device."""
|
||||
|
||||
battery_level: int
|
||||
"""The battery level as a percentage."""
|
||||
|
||||
battery: str
|
||||
"""Returns the battery level of the device."""
|
||||
|
||||
type: str
|
||||
"""The type of the device such as TRACKER or SCALE."""
|
||||
|
||||
|
||||
@dataclass
|
||||
class FitbitConfig:
|
||||
"""Information from the fitbit ConfigEntry data."""
|
||||
|
||||
@@ -8,6 +8,8 @@ import datetime
|
||||
import logging
|
||||
from typing import Any, Final, cast
|
||||
|
||||
from fitbit_web_api.models.device import Device
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
@@ -32,7 +34,7 @@ from .api import FitbitApi
|
||||
from .const import ATTRIBUTION, BATTERY_LEVELS, DOMAIN, FitbitScope, FitbitUnitSystem
|
||||
from .coordinator import FitbitConfigEntry, FitbitDeviceCoordinator
|
||||
from .exceptions import FitbitApiException, FitbitAuthException
|
||||
from .model import FitbitDevice, config_from_entry_data
|
||||
from .model import config_from_entry_data
|
||||
|
||||
_LOGGER: Final = logging.getLogger(__name__)
|
||||
|
||||
@@ -657,7 +659,7 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
|
||||
coordinator: FitbitDeviceCoordinator,
|
||||
user_profile_id: str,
|
||||
description: FitbitSensorEntityDescription,
|
||||
device: FitbitDevice,
|
||||
device: Device,
|
||||
enable_default_override: bool,
|
||||
) -> None:
|
||||
"""Initialize the Fitbit sensor."""
|
||||
@@ -677,7 +679,9 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
|
||||
@property
|
||||
def icon(self) -> str | None:
|
||||
"""Icon to use in the frontend, if any."""
|
||||
if battery_level := BATTERY_LEVELS.get(self.device.battery):
|
||||
if self.device.battery is not None and (
|
||||
battery_level := BATTERY_LEVELS.get(self.device.battery)
|
||||
):
|
||||
return icon_for_battery_level(battery_level=battery_level)
|
||||
return self.entity_description.icon
|
||||
|
||||
@@ -697,7 +701,7 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self.device = self.coordinator.data[self.device.id]
|
||||
self.device = self.coordinator.data[cast(str, self.device.id)]
|
||||
self._attr_native_value = self.device.battery
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -715,7 +719,7 @@ class FitbitBatteryLevelSensor(
|
||||
coordinator: FitbitDeviceCoordinator,
|
||||
user_profile_id: str,
|
||||
description: FitbitSensorEntityDescription,
|
||||
device: FitbitDevice,
|
||||
device: Device,
|
||||
) -> None:
|
||||
"""Initialize the Fitbit sensor."""
|
||||
super().__init__(coordinator)
|
||||
@@ -736,6 +740,6 @@ class FitbitBatteryLevelSensor(
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self.device = self.coordinator.data[self.device.id]
|
||||
self.device = self.coordinator.data[cast(str, self.device.id)]
|
||||
self._attr_native_value = self.device.battery_level
|
||||
self.async_write_ha_state()
|
||||
|
||||
49
homeassistant/components/fressnapf_tracker/__init__.py
Normal file
@@ -0,0 +1,49 @@
"""The Fressnapf Tracker integration."""
|
||||
|
||||
from fressnapftracker import AuthClient
|
||||
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
|
||||
from .const import CONF_USER_ID
|
||||
from .coordinator import (
|
||||
FressnapfTrackerConfigEntry,
|
||||
FressnapfTrackerDataUpdateCoordinator,
|
||||
)
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.DEVICE_TRACKER]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: FressnapfTrackerConfigEntry
|
||||
) -> bool:
|
||||
"""Set up Fressnapf Tracker from a config entry."""
|
||||
auth_client = AuthClient(client=get_async_client(hass))
|
||||
devices = await auth_client.get_devices(
|
||||
user_id=entry.data[CONF_USER_ID],
|
||||
user_access_token=entry.data[CONF_ACCESS_TOKEN],
|
||||
)
|
||||
|
||||
coordinators: list[FressnapfTrackerDataUpdateCoordinator] = []
|
||||
for device in devices:
|
||||
coordinator = FressnapfTrackerDataUpdateCoordinator(
|
||||
hass,
|
||||
entry,
|
||||
device,
|
||||
)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
coordinators.append(coordinator)
|
||||
|
||||
entry.runtime_data = coordinators
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: FressnapfTrackerConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
193
homeassistant/components/fressnapf_tracker/config_flow.py
Normal file
@@ -0,0 +1,193 @@
"""Config flow for the Fressnapf Tracker integration."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from fressnapftracker import (
|
||||
AuthClient,
|
||||
FressnapfTrackerInvalidPhoneNumberError,
|
||||
FressnapfTrackerInvalidTokenError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
|
||||
from .const import CONF_PHONE_NUMBER, CONF_SMS_CODE, CONF_USER_ID, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_PHONE_NUMBER): str,
|
||||
}
|
||||
)
|
||||
STEP_SMS_CODE_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_SMS_CODE): int,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class FressnapfTrackerConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Fressnapf Tracker."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
def __init__(self) -> None:
|
||||
"""Init Config Flow."""
|
||||
self._context: dict[str, Any] = {}
|
||||
self._auth_client: AuthClient | None = None
|
||||
|
||||
@property
|
||||
def auth_client(self) -> AuthClient:
|
||||
"""Return the auth client, creating it if needed."""
|
||||
if self._auth_client is None:
|
||||
self._auth_client = AuthClient(client=get_async_client(self.hass))
|
||||
return self._auth_client
|
||||
|
||||
async def _async_request_sms_code(
|
||||
self, phone_number: str
|
||||
) -> tuple[dict[str, str], bool]:
|
||||
"""Request SMS code and return errors dict and success flag."""
|
||||
errors: dict[str, str] = {}
|
||||
try:
|
||||
response = await self.auth_client.request_sms_code(
|
||||
phone_number=phone_number
|
||||
)
|
||||
except FressnapfTrackerInvalidPhoneNumberError:
|
||||
errors["base"] = "invalid_phone_number"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
_LOGGER.debug("SMS code request response: %s", response)
|
||||
self._context[CONF_USER_ID] = response.id
|
||||
self._context[CONF_PHONE_NUMBER] = phone_number
|
||||
return errors, True
|
||||
return errors, False
|
||||
|
||||
async def _async_verify_sms_code(
|
||||
self, sms_code: int
|
||||
) -> tuple[dict[str, str], str | None]:
|
||||
"""Verify SMS code and return errors and access_token."""
|
||||
errors: dict[str, str] = {}
|
||||
try:
|
||||
verification_response = await self.auth_client.verify_phone_number(
|
||||
user_id=self._context[CONF_USER_ID],
|
||||
sms_code=sms_code,
|
||||
)
|
||||
except FressnapfTrackerInvalidTokenError:
|
||||
errors["base"] = "invalid_sms_code"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception during SMS code verification")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
_LOGGER.debug(
|
||||
"Phone number verification response: %s", verification_response
|
||||
)
|
||||
return errors, verification_response.user_token.access_token
|
||||
return errors, None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match(
|
||||
{CONF_PHONE_NUMBER: user_input[CONF_PHONE_NUMBER]}
|
||||
)
|
||||
errors, success = await self._async_request_sms_code(
|
||||
user_input[CONF_PHONE_NUMBER]
|
||||
)
|
||||
if success:
|
||||
await self.async_set_unique_id(str(self._context[CONF_USER_ID]))
|
||||
self._abort_if_unique_id_configured()
|
||||
return await self.async_step_sms_code()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
|
||||
async def async_step_sms_code(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the SMS code step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
errors, access_token = await self._async_verify_sms_code(
|
||||
user_input[CONF_SMS_CODE]
|
||||
)
|
||||
if access_token:
|
||||
return self.async_create_entry(
|
||||
title=self._context[CONF_PHONE_NUMBER],
|
||||
data={
|
||||
CONF_PHONE_NUMBER: self._context[CONF_PHONE_NUMBER],
|
||||
CONF_USER_ID: self._context[CONF_USER_ID],
|
||||
CONF_ACCESS_TOKEN: access_token,
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="sms_code",
|
||||
data_schema=STEP_SMS_CODE_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reconfiguration of the integration."""
|
||||
errors: dict[str, str] = {}
|
||||
reconfigure_entry = self._get_reconfigure_entry()
|
||||
|
||||
if user_input is not None:
|
||||
errors, success = await self._async_request_sms_code(
|
||||
user_input[CONF_PHONE_NUMBER]
|
||||
)
|
||||
if success:
|
||||
if reconfigure_entry.data[CONF_USER_ID] != self._context[CONF_USER_ID]:
|
||||
errors["base"] = "account_change_not_allowed"
|
||||
else:
|
||||
return await self.async_step_reconfigure_sms_code()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure",
|
||||
data_schema=vol.Schema(
|
||||
{
|
||||
vol.Required(
|
||||
CONF_PHONE_NUMBER,
|
||||
default=reconfigure_entry.data.get(CONF_PHONE_NUMBER),
|
||||
): str,
|
||||
}
|
||||
),
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reconfigure_sms_code(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the SMS code step during reconfiguration."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
errors, access_token = await self._async_verify_sms_code(
|
||||
user_input[CONF_SMS_CODE]
|
||||
)
|
||||
if access_token:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reconfigure_entry(),
|
||||
data={
|
||||
CONF_PHONE_NUMBER: self._context[CONF_PHONE_NUMBER],
|
||||
CONF_USER_ID: self._context[CONF_USER_ID],
|
||||
CONF_ACCESS_TOKEN: access_token,
|
||||
},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reconfigure_sms_code",
|
||||
data_schema=STEP_SMS_CODE_DATA_SCHEMA,
|
||||
errors=errors,
|
||||
)
|
||||
6
homeassistant/components/fressnapf_tracker/const.py
Normal file
@@ -0,0 +1,6 @@
"""Constants for the Fressnapf Tracker integration."""
|
||||
|
||||
DOMAIN = "fressnapf_tracker"
|
||||
CONF_PHONE_NUMBER = "phone_number"
|
||||
CONF_SMS_CODE = "sms_code"
|
||||
CONF_USER_ID = "user_id"
|
||||
50
homeassistant/components/fressnapf_tracker/coordinator.py
Normal file
@@ -0,0 +1,50 @@
"""Data update coordinator for Fressnapf Tracker integration."""
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from fressnapftracker import ApiClient, Device, FressnapfTrackerError, Tracker
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.httpx_client import get_async_client
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type FressnapfTrackerConfigEntry = ConfigEntry[
|
||||
list[FressnapfTrackerDataUpdateCoordinator]
|
||||
]
|
||||
|
||||
|
||||
class FressnapfTrackerDataUpdateCoordinator(DataUpdateCoordinator[Tracker]):
|
||||
"""Class to manage fetching data from the API."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: FressnapfTrackerConfigEntry,
|
||||
device: Device,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
name=DOMAIN,
|
||||
update_interval=timedelta(minutes=15),
|
||||
config_entry=config_entry,
|
||||
)
|
||||
self.device = device
|
||||
self.client = ApiClient(
|
||||
serial_number=device.serialnumber,
|
||||
device_token=device.token,
|
||||
client=get_async_client(hass),
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> Tracker:
|
||||
try:
|
||||
return await self.client.get_tracker()
|
||||
except FressnapfTrackerError as exception:
|
||||
raise UpdateFailed(exception) from exception
|
||||
69
homeassistant/components/fressnapf_tracker/device_tracker.py
Normal file
@@ -0,0 +1,69 @@
"""Device tracker platform for fressnapf_tracker."""
|
||||
|
||||
from homeassistant.components.device_tracker import SourceType
|
||||
from homeassistant.components.device_tracker.config_entry import TrackerEntity
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import FressnapfTrackerConfigEntry, FressnapfTrackerDataUpdateCoordinator
|
||||
from .entity import FressnapfTrackerBaseEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: FressnapfTrackerConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the fressnapf_tracker device_trackers."""
|
||||
async_add_entities(
|
||||
FressnapfTrackerDeviceTracker(coordinator) for coordinator in entry.runtime_data
|
||||
)
|
||||
|
||||
|
||||
class FressnapfTrackerDeviceTracker(FressnapfTrackerBaseEntity, TrackerEntity):
|
||||
"""fressnapf_tracker device tracker."""
|
||||
|
||||
_attr_name = None
|
||||
_attr_translation_key = "pet"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: FressnapfTrackerDataUpdateCoordinator,
|
||||
) -> None:
|
||||
"""Initialize the device tracker."""
|
||||
super().__init__(coordinator)
|
||||
self._attr_unique_id = coordinator.device.serialnumber
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if entity is available."""
|
||||
return super().available and self.coordinator.data.position is not None
|
||||
|
||||
@property
|
||||
def latitude(self) -> float | None:
|
||||
"""Return latitude value of the device."""
|
||||
if self.coordinator.data.position is not None:
|
||||
return self.coordinator.data.position.lat
|
||||
return None
|
||||
|
||||
@property
|
||||
def longitude(self) -> float | None:
|
||||
"""Return longitude value of the device."""
|
||||
if self.coordinator.data.position is not None:
|
||||
return self.coordinator.data.position.lng
|
||||
return None
|
||||
|
||||
@property
|
||||
def source_type(self) -> SourceType:
|
||||
"""Return the source type, eg gps or router, of the device."""
|
||||
return SourceType.GPS
|
||||
|
||||
@property
|
||||
def location_accuracy(self) -> float:
|
||||
"""Return the location accuracy of the device.
|
||||
|
||||
Value in meters.
|
||||
"""
|
||||
if self.coordinator.data.position is not None:
|
||||
return float(self.coordinator.data.position.accuracy)
|
||||
return 0
|
||||
27
homeassistant/components/fressnapf_tracker/entity.py
Normal file
@@ -0,0 +1,27 @@
"""fressnapf_tracker class."""
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from . import FressnapfTrackerDataUpdateCoordinator
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
class FressnapfTrackerBaseEntity(
|
||||
CoordinatorEntity[FressnapfTrackerDataUpdateCoordinator]
|
||||
):
|
||||
"""Base entity for Fressnapf Tracker."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(self, coordinator: FressnapfTrackerDataUpdateCoordinator) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
self.id = coordinator.device.serialnumber
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, str(self.id))},
|
||||
name=str(self.coordinator.data.name),
|
||||
model=str(self.coordinator.data.tracker_settings.generation),
|
||||
manufacturer="Fressnapf",
|
||||
serial_number=str(self.id),
|
||||
)
|
||||
9
homeassistant/components/fressnapf_tracker/icons.json
Normal file
@@ -0,0 +1,9 @@
{
|
||||
"entity": {
|
||||
"device_tracker": {
|
||||
"pet": {
|
||||
"default": "mdi:paw"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
11
homeassistant/components/fressnapf_tracker/manifest.json
Normal file
@@ -0,0 +1,11 @@
{
|
||||
"domain": "fressnapf_tracker",
|
||||
"name": "Fressnapf Tracker",
|
||||
"codeowners": ["@eifinger"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/fressnapf_tracker",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["fressnapftracker==0.1.2"]
|
||||
}
|
||||
@@ -0,0 +1,68 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: done
|
||||
integration-owner: todo
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: todo
|
||||
entity-device-class: todo
|
||||
entity-disabled-by-default: todo
|
||||
entity-translations:
|
||||
status: exempt
|
||||
comment: No entities to translate
|
||||
exception-translations: todo
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: done
|
||||
repair-issues: todo
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
49
homeassistant/components/fressnapf_tracker/strings.json
Normal file
@@ -0,0 +1,49 @@
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"account_change_not_allowed": "Reconfiguring to a different account is not allowed. Please create a new entry instead.",
|
||||
"invalid_phone_number": "Please enter a valid phone number.",
|
||||
"invalid_sms_code": "The SMS code you entered is invalid.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"reconfigure": {
|
||||
"data": {
|
||||
"phone_number": "[%key:component::fressnapf_tracker::config::step::user::data::phone_number%]"
|
||||
},
|
||||
"data_description": {
|
||||
"phone_number": "[%key:component::fressnapf_tracker::config::step::user::data_description::phone_number%]"
|
||||
},
|
||||
"description": "Re-authenticate with your Fressnapf Tracker account to refresh your credentials."
|
||||
},
|
||||
"reconfigure_sms_code": {
|
||||
"data": {
|
||||
"sms_code": "[%key:component::fressnapf_tracker::config::step::sms_code::data::sms_code%]"
|
||||
},
|
||||
"data_description": {
|
||||
"sms_code": "[%key:component::fressnapf_tracker::config::step::sms_code::data_description::sms_code%]"
|
||||
}
|
||||
},
|
||||
"sms_code": {
|
||||
"data": {
|
||||
"sms_code": "SMS code"
|
||||
},
|
||||
"data_description": {
|
||||
"sms_code": "Enter the SMS code you received on your phone."
|
||||
}
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"phone_number": "Phone number"
|
||||
},
|
||||
"data_description": {
|
||||
"phone_number": "Enter your phone number in international format (e.g., +4917612345678)."
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -23,5 +23,5 @@
|
||||
"winter_mode": {}
|
||||
},
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20251105.1"]
|
||||
"requirements": ["home-assistant-frontend==20251201.0"]
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@

from __future__ import annotations

import asyncio
from collections.abc import Callable, Coroutine
from functools import wraps
from typing import Any
@@ -15,7 +16,9 @@ from homeassistant.helpers import singleton
from homeassistant.helpers.storage import Store
from homeassistant.util.hass_dict import HassKey

DATA_STORAGE: HassKey[dict[str, UserStore]] = HassKey("frontend_storage")
DATA_STORAGE: HassKey[dict[str, asyncio.Future[UserStore]]] = HassKey(
    "frontend_storage"
)
DATA_SYSTEM_STORAGE: HassKey[SystemStore] = HassKey("frontend_system_storage")
STORAGE_VERSION_USER_DATA = 1
STORAGE_VERSION_SYSTEM_DATA = 1
@@ -34,11 +37,18 @@ async def async_setup_frontend_storage(hass: HomeAssistant) -> None:
async def async_user_store(hass: HomeAssistant, user_id: str) -> UserStore:
    """Access a user store."""
    stores = hass.data.setdefault(DATA_STORAGE, {})
    if (store := stores.get(user_id)) is None:
        store = stores[user_id] = UserStore(hass, user_id)
        await store.async_load()
    if (future := stores.get(user_id)) is None:
        future = stores[user_id] = hass.loop.create_future()
        store = UserStore(hass, user_id)
        try:
            await store.async_load()
        except BaseException as ex:
            del stores[user_id]
            future.set_exception(ex)
            raise
        future.set_result(store)

        return store
    return await future


class UserStore:
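Note: the change above caches an asyncio.Future per user instead of the loaded store, so concurrent first-time callers share a single load rather than each constructing and loading their own UserStore. A self-contained sketch of that pattern, with generic names rather than the frontend code itself:

import asyncio


class _Store:
    def __init__(self, user_id: str) -> None:
        self.user_id = user_id
        self.loads = 0

    async def async_load(self) -> None:
        self.loads += 1
        await asyncio.sleep(0.01)  # simulate slow storage I/O


_FUTURES: dict[str, asyncio.Future[_Store]] = {}


async def get_store(user_id: str) -> _Store:
    """Return the per-user store, loading it at most once even under concurrency."""
    if (future := _FUTURES.get(user_id)) is None:
        future = _FUTURES[user_id] = asyncio.get_running_loop().create_future()
        store = _Store(user_id)
        try:
            await store.async_load()
        except BaseException as ex:
            del _FUTURES[user_id]  # allow a later retry
            future.set_exception(ex)
            raise
        future.set_result(store)
        return store
    return await future  # later callers wait for the in-flight load


async def main() -> None:
    stores = await asyncio.gather(*(get_store("alice") for _ in range(5)))
    assert all(s is stores[0] for s in stores)
    assert stores[0].loads == 1  # loaded exactly once


if __name__ == "__main__":
    asyncio.run(main())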
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["google_air_quality_api"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["google_air_quality_api==1.1.1"]
|
||||
"requirements": ["google_air_quality_api==1.1.3"]
|
||||
}
|
||||
|
||||
@@ -132,7 +132,6 @@
|
||||
"heavily_polluted": "Heavily polluted",
|
||||
"heavy_air_pollution": "Heavy air pollution",
|
||||
"high_air_pollution": "High air pollution",
|
||||
"high_air_quality": "High air pollution",
|
||||
"high_health_risk": "High health risk",
|
||||
"horrible_air_quality": "Horrible air quality",
|
||||
"light_air_pollution": "Light air pollution",
|
||||
@@ -165,20 +164,18 @@
|
||||
"slightly_polluted": "Slightly polluted",
|
||||
"sufficient_air_quality": "Sufficient air quality",
|
||||
"unfavorable_air_quality": "Unfavorable air quality",
|
||||
"unfavorable_sensitive": "Unfavorable air quality for sensitive groups",
|
||||
"unfavorable_air_quality_for_sensitive_groups": "Unfavorable air quality for sensitive groups",
|
||||
"unhealthy_air_quality": "Unhealthy air quality",
|
||||
"unhealthy_sensitive": "Unhealthy air quality for sensitive groups",
|
||||
"unsatisfactory_air_quality": "Unsatisfactory air quality",
|
||||
"very_bad_air_quality": "Very bad air quality",
|
||||
"very_good_air_quality": "Very good air quality",
|
||||
"very_high_air_pollution": "Very high air pollution",
|
||||
"very_high_air_quality": "Very High air pollution",
|
||||
"very_high_health_risk": "Very high health risk",
|
||||
"very_low_air_pollution": "Very low air pollution",
|
||||
"very_polluted": "Very polluted",
|
||||
"very_poor_air_quality": "Very poor air quality",
|
||||
"very_unfavorable_air_quality": "Very unfavorable air quality",
|
||||
"very_unhealthy": "Very unhealthy air quality",
|
||||
"very_unhealthy_air_quality": "Very unhealthy air quality",
|
||||
"warning_air_pollution": "Warning level air pollution"
|
||||
}
|
||||
|
||||
@@ -53,7 +53,7 @@ from homeassistant.helpers.issue_registry import (
|
||||
async_create_issue,
|
||||
async_delete_issue,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_IGNORE_NON_NUMERIC, DOMAIN
|
||||
from .entity import GroupEntity
|
||||
@@ -374,7 +374,7 @@ class SensorGroup(GroupEntity, SensorEntity):
|
||||
def async_update_group_state(self) -> None:
|
||||
"""Query all members and determine the sensor group state."""
|
||||
self.calculate_state_attributes(self._get_valid_entities())
|
||||
states: list[StateType] = []
|
||||
states: list[str] = []
|
||||
valid_units = self._valid_units
|
||||
valid_states: list[bool] = []
|
||||
sensor_values: list[tuple[str, float, State]] = []
|
||||
|
||||
@@ -211,7 +211,7 @@ async def ws_start_preview(
|
||||
|
||||
@callback
|
||||
def async_preview_updated(
|
||||
last_exception: Exception | None, state: str, attributes: Mapping[str, Any]
|
||||
last_exception: BaseException | None, state: str, attributes: Mapping[str, Any]
|
||||
) -> None:
|
||||
"""Forward config entry state events to websocket."""
|
||||
if last_exception:
|
||||
|
||||
@@ -241,7 +241,9 @@ class HistoryStatsSensor(HistoryStatsSensorBase):
|
||||
|
||||
async def async_start_preview(
|
||||
self,
|
||||
preview_callback: Callable[[Exception | None, str, Mapping[str, Any]], None],
|
||||
preview_callback: Callable[
|
||||
[BaseException | None, str, Mapping[str, Any]], None
|
||||
],
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Render a preview."""
|
||||
|
||||
|
||||
@@ -19,6 +19,7 @@
|
||||
}
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/home_connect",
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["aiohomeconnect"],
|
||||
"quality_scale": "platinum",
|
||||
|
||||
@@ -33,13 +33,14 @@ from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.hassio import is_hassio
|
||||
|
||||
from .const import OTBR_DOMAIN, Z2M_EMBER_DOCS_URL, ZHA_DOMAIN
|
||||
from .const import DOMAIN, OTBR_DOMAIN, Z2M_EMBER_DOCS_URL, ZHA_DOMAIN
|
||||
from .util import (
|
||||
ApplicationType,
|
||||
FirmwareInfo,
|
||||
OwningAddon,
|
||||
OwningIntegration,
|
||||
ResetTarget,
|
||||
async_firmware_flashing_context,
|
||||
async_flash_silabs_firmware,
|
||||
get_otbr_addon_manager,
|
||||
guess_firmware_info,
|
||||
@@ -228,83 +229,95 @@ class BaseFirmwareInstallFlow(ConfigEntryBaseFlow, ABC):
|
||||
# Keep track of the firmware we're working with, for error messages
|
||||
self.installing_firmware_name = firmware_name
|
||||
|
||||
# Installing new firmware is only truly required if the wrong type is
|
||||
# installed: upgrading to the latest release of the current firmware type
|
||||
# isn't strictly necessary for functionality.
|
||||
self._probed_firmware_info = await probe_silabs_firmware_info(
|
||||
self._device,
|
||||
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
|
||||
application_probe_methods=self.APPLICATION_PROBE_METHODS,
|
||||
)
|
||||
|
||||
firmware_install_required = self._probed_firmware_info is None or (
|
||||
self._probed_firmware_info.firmware_type != expected_installed_firmware_type
|
||||
)
|
||||
|
||||
session = async_get_clientsession(self.hass)
|
||||
client = FirmwareUpdateClient(fw_update_url, session)
|
||||
|
||||
try:
|
||||
manifest = await client.async_update_data()
|
||||
fw_manifest = next(
|
||||
fw for fw in manifest.firmwares if fw.filename.startswith(fw_type)
|
||||
# For the duration of firmware flashing, hint to other integrations (i.e. ZHA)
|
||||
# that the hardware is in use and should not be accessed. This is separate from
|
||||
# locking the serial port itself, since a momentary release of the port may
|
||||
# still allow for ZHA to reclaim the device.
|
||||
async with async_firmware_flashing_context(self.hass, self._device, DOMAIN):
|
||||
# Installing new firmware is only truly required if the wrong type is
|
||||
# installed: upgrading to the latest release of the current firmware type
|
||||
# isn't strictly necessary for functionality.
|
||||
self._probed_firmware_info = await probe_silabs_firmware_info(
|
||||
self._device,
|
||||
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
|
||||
application_probe_methods=self.APPLICATION_PROBE_METHODS,
|
||||
)
|
||||
except (StopIteration, TimeoutError, ClientError, ManifestMissing) as err:
|
||||
_LOGGER.warning("Failed to fetch firmware update manifest", exc_info=True)
|
||||
|
||||
# Not having internet access should not prevent setup
|
||||
if not firmware_install_required:
|
||||
_LOGGER.debug("Skipping firmware upgrade due to index download failure")
|
||||
return
|
||||
firmware_install_required = self._probed_firmware_info is None or (
|
||||
self._probed_firmware_info.firmware_type
|
||||
!= expected_installed_firmware_type
|
||||
)
|
||||
|
||||
raise AbortFlow(
|
||||
reason="fw_download_failed",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
) from err
|
||||
session = async_get_clientsession(self.hass)
|
||||
client = FirmwareUpdateClient(fw_update_url, session)
|
||||
|
||||
if not firmware_install_required:
|
||||
assert self._probed_firmware_info is not None
|
||||
|
||||
# Make sure we do not downgrade the firmware
|
||||
fw_metadata = NabuCasaMetadata.from_json(fw_manifest.metadata)
|
||||
fw_version = fw_metadata.get_public_version()
|
||||
probed_fw_version = Version(self._probed_firmware_info.firmware_version)
|
||||
|
||||
if probed_fw_version >= fw_version:
|
||||
_LOGGER.debug(
|
||||
"Not downgrading firmware, installed %s is newer than available %s",
|
||||
probed_fw_version,
|
||||
fw_version,
|
||||
try:
|
||||
manifest = await client.async_update_data()
|
||||
fw_manifest = next(
|
||||
fw for fw in manifest.firmwares if fw.filename.startswith(fw_type)
|
||||
)
|
||||
except (StopIteration, TimeoutError, ClientError, ManifestMissing) as err:
|
||||
_LOGGER.warning(
|
||||
"Failed to fetch firmware update manifest", exc_info=True
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
fw_data = await client.async_fetch_firmware(fw_manifest)
|
||||
except (TimeoutError, ClientError, ValueError) as err:
|
||||
_LOGGER.warning("Failed to fetch firmware update", exc_info=True)
|
||||
# Not having internet access should not prevent setup
|
||||
if not firmware_install_required:
|
||||
_LOGGER.debug(
|
||||
"Skipping firmware upgrade due to index download failure"
|
||||
)
|
||||
return
|
||||
|
||||
raise AbortFlow(
|
||||
reason="fw_download_failed",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
) from err
|
||||
|
||||
# If we cannot download new firmware, we shouldn't block setup
|
||||
if not firmware_install_required:
|
||||
_LOGGER.debug("Skipping firmware upgrade due to image download failure")
|
||||
return
|
||||
assert self._probed_firmware_info is not None
|
||||
|
||||
# Otherwise, fail
|
||||
raise AbortFlow(
|
||||
reason="fw_download_failed",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
) from err
|
||||
# Make sure we do not downgrade the firmware
|
||||
fw_metadata = NabuCasaMetadata.from_json(fw_manifest.metadata)
|
||||
fw_version = fw_metadata.get_public_version()
|
||||
probed_fw_version = Version(self._probed_firmware_info.firmware_version)
|
||||
|
||||
self._probed_firmware_info = await async_flash_silabs_firmware(
|
||||
hass=self.hass,
|
||||
device=self._device,
|
||||
fw_data=fw_data,
|
||||
expected_installed_firmware_type=expected_installed_firmware_type,
|
||||
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
|
||||
application_probe_methods=self.APPLICATION_PROBE_METHODS,
|
||||
progress_callback=lambda offset, total: self.async_update_progress(
|
||||
offset / total
|
||||
),
|
||||
)
|
||||
if probed_fw_version >= fw_version:
|
||||
_LOGGER.debug(
|
||||
"Not downgrading firmware, installed %s is newer than available %s",
|
||||
probed_fw_version,
|
||||
fw_version,
|
||||
)
|
||||
return
|
||||
|
||||
try:
|
||||
fw_data = await client.async_fetch_firmware(fw_manifest)
|
||||
except (TimeoutError, ClientError, ValueError) as err:
|
||||
_LOGGER.warning("Failed to fetch firmware update", exc_info=True)
|
||||
|
||||
# If we cannot download new firmware, we shouldn't block setup
|
||||
if not firmware_install_required:
|
||||
_LOGGER.debug(
|
||||
"Skipping firmware upgrade due to image download failure"
|
||||
)
|
||||
return
|
||||
|
||||
# Otherwise, fail
|
||||
raise AbortFlow(
|
||||
reason="fw_download_failed",
|
||||
description_placeholders=self._get_translation_placeholders(),
|
||||
) from err
|
||||
|
||||
self._probed_firmware_info = await async_flash_silabs_firmware(
|
||||
hass=self.hass,
|
||||
device=self._device,
|
||||
fw_data=fw_data,
|
||||
expected_installed_firmware_type=expected_installed_firmware_type,
|
||||
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
|
||||
application_probe_methods=self.APPLICATION_PROBE_METHODS,
|
||||
progress_callback=lambda offset, total: self.async_update_progress(
|
||||
offset / total
|
||||
),
|
||||
)
|
||||
|
||||
async def _configure_and_start_otbr_addon(self) -> None:
|
||||
"""Configure and start the OTBR addon."""
|
||||
|
||||
@@ -26,6 +26,7 @@ from .util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
async_firmware_flashing_context,
async_flash_silabs_firmware,
)

@@ -274,16 +275,18 @@ class BaseFirmwareUpdateEntity(
)

try:
firmware_info = await async_flash_silabs_firmware(
hass=self.hass,
device=self._current_device,
fw_data=fw_data,
expected_installed_firmware_type=self.entity_description.expected_firmware_type,
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
application_probe_methods=self.APPLICATION_PROBE_METHODS,
progress_callback=self._update_progress,
domain=self._config_entry.domain,
)
async with async_firmware_flashing_context(
self.hass, self._current_device, self._config_entry.domain
):
firmware_info = await async_flash_silabs_firmware(
hass=self.hass,
device=self._current_device,
fw_data=fw_data,
expected_installed_firmware_type=self.entity_description.expected_firmware_type,
bootloader_reset_methods=self.BOOTLOADER_RESET_METHODS,
application_probe_methods=self.APPLICATION_PROBE_METHODS,
progress_callback=self._update_progress,
)
finally:
self._attr_in_progress = False
self.async_write_ha_state()

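The update-entity hunk above moves the flash call inside the new async_firmware_flashing_context while keeping the existing try/finally that clears the entity's in-progress flag, so the flag is reset even when stopping an owner or flashing fails. A reduced sketch of that shape, with a print standing in for the entity state write:

    import asyncio

    async def flash() -> None:
        """Stand-in for the actual flash call; may raise."""
        await asyncio.sleep(0)

    async def async_install() -> None:
        in_progress = True  # stands in for self._attr_in_progress = True
        try:
            await flash()
        finally:
            # Always clear the flag so the entity never sticks at "installing",
            # regardless of whether the flash succeeded.
            in_progress = False
            print(f"in_progress={in_progress}")

    asyncio.run(async_install())
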
@@ -26,7 +26,6 @@ from homeassistant.helpers.singleton import singleton

from . import DATA_COMPONENT
from .const import (
DOMAIN,
OTBR_ADDON_MANAGER_DATA,
OTBR_ADDON_NAME,
OTBR_ADDON_SLUG,
@@ -366,6 +365,22 @@ async def probe_silabs_firmware_type(
return fw_info.firmware_type


@asynccontextmanager
async def async_firmware_flashing_context(
hass: HomeAssistant, device: str, source_domain: str
) -> AsyncIterator[None]:
"""Register a device as having its firmware being actively interacted with."""
async with async_firmware_update_context(hass, device, source_domain):
firmware_info = await guess_firmware_info(hass, device)
_LOGGER.debug("Guessed firmware info before update: %s", firmware_info)

async with AsyncExitStack() as stack:
for owner in firmware_info.owners:
await stack.enter_async_context(owner.temporarily_stop(hass))

yield

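The new async_firmware_flashing_context combines @asynccontextmanager with AsyncExitStack so that every integration currently using the device is stopped for the duration of the flash and restarted afterwards, in reverse order. A self-contained sketch of that pattern using only the standard library, with string names standing in for the owner objects:

    import asyncio
    from collections.abc import AsyncIterator
    from contextlib import AsyncExitStack, asynccontextmanager

    @asynccontextmanager
    async def temporarily_stop(name: str) -> AsyncIterator[None]:
        """Stand-in for an owner's temporarily_stop(): stop on enter, restart on exit."""
        print(f"stopping {name}")
        try:
            yield
        finally:
            print(f"restarting {name}")

    @asynccontextmanager
    async def flashing_context(owners: list[str]) -> AsyncIterator[None]:
        """Keep all owners stopped while the body of the with-block runs."""
        async with AsyncExitStack() as stack:
            for owner in owners:
                await stack.enter_async_context(temporarily_stop(owner))
            yield

    async def main() -> None:
        async with flashing_context(["zha", "otbr"]):
            print("flashing firmware...")

    asyncio.run(main())
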
async def async_flash_silabs_firmware(
hass: HomeAssistant,
device: str,
@@ -374,10 +389,11 @@ async def async_flash_silabs_firmware(
bootloader_reset_methods: Sequence[ResetTarget],
application_probe_methods: Sequence[tuple[ApplicationType, int]],
progress_callback: Callable[[int, int], None] | None = None,
*,
domain: str = DOMAIN,
) -> FirmwareInfo:
"""Flash firmware to the SiLabs device."""
"""Flash firmware to the SiLabs device.

This function is meant to be used within a firmware update context.
"""
if not any(
method == expected_installed_firmware_type
for method, _ in application_probe_methods
@@ -387,54 +403,44 @@ async def async_flash_silabs_firmware(
f" not in application probe methods {application_probe_methods!r}"
)

async with async_firmware_update_context(hass, device, domain):
firmware_info = await guess_firmware_info(hass, device)
_LOGGER.debug("Identified firmware info: %s", firmware_info)
fw_image = await hass.async_add_executor_job(parse_firmware_image, fw_data)

fw_image = await hass.async_add_executor_job(parse_firmware_image, fw_data)
flasher = Flasher(
device=device,
probe_methods=tuple(
(m.as_flasher_application_type(), baudrate)
for m, baudrate in application_probe_methods
),
bootloader_reset=tuple(
m.as_flasher_reset_target() for m in bootloader_reset_methods
),
)

flasher = Flasher(
device=device,
probe_methods=tuple(
(m.as_flasher_application_type(), baudrate)
for m, baudrate in application_probe_methods
),
bootloader_reset=tuple(
m.as_flasher_reset_target() for m in bootloader_reset_methods
),
)
try:
# Enter the bootloader with indeterminate progress
await flasher.enter_bootloader()

async with AsyncExitStack() as stack:
for owner in firmware_info.owners:
await stack.enter_async_context(owner.temporarily_stop(hass))
# Flash the firmware, with progress
await flasher.flash_firmware(fw_image, progress_callback=progress_callback)
except PermissionError as err:
raise HomeAssistantError(
"Failed to flash firmware: Device is used by another application"
) from err
except Exception as err:
raise HomeAssistantError("Failed to flash firmware") from err

try:
# Enter the bootloader with indeterminate progress
await flasher.enter_bootloader()
probed_firmware_info = await probe_silabs_firmware_info(
device,
bootloader_reset_methods=bootloader_reset_methods,
# Only probe for the expected installed firmware type
application_probe_methods=[
(method, baudrate)
for method, baudrate in application_probe_methods
if method == expected_installed_firmware_type
],
)

# Flash the firmware, with progress
await flasher.flash_firmware(
fw_image, progress_callback=progress_callback
)
except PermissionError as err:
raise HomeAssistantError(
"Failed to flash firmware: Device is used by another application"
) from err
except Exception as err:
raise HomeAssistantError("Failed to flash firmware") from err
if probed_firmware_info is None:
raise HomeAssistantError("Failed to probe the firmware after flashing")

probed_firmware_info = await probe_silabs_firmware_info(
device,
bootloader_reset_methods=bootloader_reset_methods,
# Only probe for the expected installed firmware type
application_probe_methods=[
(method, baudrate)
for method, baudrate in application_probe_methods
if method == expected_installed_firmware_type
],
)

if probed_firmware_info is None:
raise HomeAssistantError("Failed to probe the firmware after flashing")

return probed_firmware_info
return probed_firmware_info

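async_flash_silabs_firmware reports progress as (offset, total) byte counts, and both call sites above adapt that into the 0-1 fraction the config flow and the update entity expect. A small illustration of that adapter, with print standing in for the Home Assistant progress reporters:

    from collections.abc import Callable

    def make_progress_adapter(
        report_fraction: Callable[[float], None],
    ) -> Callable[[int, int], None]:
        """Wrap a fraction-based reporter into the (offset, total) callback the flasher expects."""

        def _on_progress(offset: int, total: int) -> None:
            report_fraction(offset / total)

        return _on_progress

    # Example: print percentages as the flash advances.
    on_progress = make_progress_adapter(lambda fraction: print(f"{fraction:.0%}"))
    on_progress(512, 2048)   # prints "25%"
    on_progress(2048, 2048)  # prints "100%"
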
@@ -12,6 +12,7 @@
"config_flow": true,
"dependencies": ["bluetooth_adapters", "zeroconf"],
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"integration_type": "device",
"iot_class": "local_push",
"loggers": ["aiohomekit", "commentjson"],
"requirements": ["aiohomekit==3.2.20"],

@@ -9,6 +9,7 @@
}
],
"documentation": "https://www.home-assistant.io/integrations/homewizard",
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["homewizard_energy"],
"quality_scale": "platinum",

@@ -2,7 +2,7 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"not_implemented": "This integration can only be setup via discovery."
"not_implemented": "This integration can only be set up via discovery."
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",

@@ -11,6 +11,7 @@ from random import random

import voluptuous as vol

from homeassistant.components.labs import async_is_preview_feature_enabled, async_listen
from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance
from homeassistant.components.recorder.models import (
StatisticData,
@@ -30,7 +31,7 @@ from homeassistant.const import (
UnitOfTemperature,
UnitOfVolume,
)
from homeassistant.core import HomeAssistant, ServiceCall, callback
from homeassistant.core import HomeAssistant, ServiceCall, ServiceResponse, callback
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.issue_registry import (
@@ -38,7 +39,6 @@ from homeassistant.helpers.issue_registry import (
async_create_issue,
async_delete_issue,
)
from homeassistant.helpers.labs import async_is_preview_feature_enabled, async_listen
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import (
@@ -81,11 +81,22 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
)

@callback
def service_handler(call: ServiceCall | None = None) -> None:
def service_handler(call: ServiceCall | None = None) -> ServiceResponse:
"""Do nothing."""
return None

hass.services.async_register(
DOMAIN, "test_service_1", service_handler, SCHEMA_SERVICE_TEST_SERVICE_1
DOMAIN,
"test_service_1",
service_handler,
SCHEMA_SERVICE_TEST_SERVICE_1,
description_placeholders={
"meep_1": "foo",
"meep_2": "bar",
"meep_3": "beer",
"meep_4": "milk",
"meep_5": "https://example.com",
},
)

return True

@@ -117,14 +117,16 @@
},
"services": {
"test_service_1": {
"description": "Fake action for testing",
"description": "Fake action for testing {meep_2}",
"fields": {
"field_1": {
"description": "Number of seconds",
"name": "Field 1"
"description": "Number of seconds {meep_4}",
"example": "Example: {meep_5}",
"name": "Field 1 {meep_3}"
},
"field_2": {
"description": "Mode",
"example": "Field 2 example",
"name": "Field 2"
},
"field_3": {
@@ -136,7 +138,7 @@
"name": "Field 4"
}
},
"name": "Test action 1",
"name": "Test action {meep_1}",
"sections": {
"advanced_fields": {
"description": "Some very advanced things",

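The two kitchen_sink hunks above belong together: the description_placeholders passed to hass.services.async_register are substituted into the {meep_*} tokens added to the service's strings.json. A condensed sketch of the registration side, assuming the extended async_register signature shown in this branch (part of the experimental change set, not the released API), with a hypothetical domain and placeholder name:

    import voluptuous as vol
    from homeassistant.core import HomeAssistant, ServiceCall, ServiceResponse, callback

    SCHEMA_MY_SERVICE = vol.Schema({vol.Optional("duration"): int})

    @callback
    def _handle(call: ServiceCall | None = None) -> ServiceResponse:
        """Do nothing; the interesting part is the registration below."""
        return None

    def register_service(hass: HomeAssistant) -> None:
        # "{docs_url}" must appear in the service's strings.json name, description,
        # or field texts for the placeholder to be rendered in the UI.
        hass.services.async_register(
            "my_domain",
            "my_service",
            _handle,
            SCHEMA_MY_SERVICE,
            description_placeholders={"docs_url": "https://example.com/docs"},
        )
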
@@ -39,6 +39,10 @@ if TYPE_CHECKING:

_LOGGER = logging.getLogger(__name__)

_DESCRIPTION_PLACEHOLDERS = {
"sensor_value_types_url": "https://www.home-assistant.io/integrations/knx/#value-types"
}


@callback
def async_setup_services(hass: HomeAssistant) -> None:
@@ -48,6 +52,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
SERVICE_KNX_SEND,
service_send_to_knx_bus,
schema=SERVICE_KNX_SEND_SCHEMA,
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
)

hass.services.async_register(
@@ -63,6 +68,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
SERVICE_KNX_EVENT_REGISTER,
service_event_register_modify,
schema=SERVICE_KNX_EVENT_REGISTER_SCHEMA,
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
)

async_register_admin_service(
@@ -71,6 +77,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
SERVICE_KNX_EXPOSURE_REGISTER,
service_exposure_register_modify,
schema=SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA,
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
)

async_register_admin_service(

Some files were not shown because too many files have changed in this diff.