Compare commits

..

12 Commits

Author SHA1 Message Date
J. Nick Koston  29b3712836  dry it up  2025-11-23 14:59:38 -08:00
J. Nick Koston  9099cae4db  Merge remote-tracking branch 'upstream/dev' into aioshelly_13210_user_flow  2025-11-23 14:52:50 -08:00
J. Nick Koston  a53036ca2c  tweak  2025-11-23 14:52:25 -08:00
J. Nick Koston  46ba228d4f  cover  2025-11-23 14:44:29 -08:00
J. Nick Koston  e36a62b0d6  fixes  2025-11-23 13:21:16 -08:00
J. Nick Koston  adc55258c6  dry it up  2025-11-23 12:06:57 -08:00
J. Nick Koston  c330bebf4c  cover  2025-11-23 11:59:21 -08:00
J. Nick Koston  d52152003b  more cover  2025-11-23 11:53:20 -08:00
J. Nick Koston  be19fef6dd  fixes  2025-11-23 11:31:15 -08:00
J. Nick Koston  2b35b7fc65  Merge remote-tracking branch 'upstream/dev' into aioshelly_13210_user_flow  2025-11-23 11:26:00 -08:00
J. Nick Koston  643c1a2259  rework shelly user flow to show discovered devices  2025-11-23 11:24:45 -08:00
J. Nick Koston  963ebfaf3b  Bump aioshelly to 13.21.0 (changelog: https://github.com/home-assistant-libs/aioshelly/compare/13.20.0...13.21.0)  2025-11-23 10:44:26 -08:00
515 changed files with 8320 additions and 42274 deletions

View File

@@ -14,9 +14,6 @@ env:
PIP_TIMEOUT: 60
UV_HTTP_TIMEOUT: 60
UV_SYSTEM_PYTHON: "true"
# Base image version from https://github.com/home-assistant/docker
BASE_IMAGE_VERSION: "2025.11.3"
ARCHITECTURES: '["amd64", "aarch64"]'
jobs:
init:
@@ -24,16 +21,18 @@ jobs:
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
outputs:
architectures: ${{ steps.info.outputs.architectures }}
version: ${{ steps.version.outputs.version }}
channel: ${{ steps.version.outputs.channel }}
publish: ${{ steps.version.outputs.publish }}
architectures: ${{ env.ARCHITECTURES }}
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
fetch-depth: 0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -80,7 +79,7 @@ jobs:
name: Build ${{ matrix.arch }} base core image
if: github.repository_owner == 'home-assistant'
needs: init
runs-on: ${{ matrix.os }}
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
@@ -89,11 +88,6 @@ jobs:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
include:
- arch: amd64
os: ubuntu-latest
- arch: aarch64
os: ubuntu-24.04-arm
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
@@ -122,7 +116,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
@@ -190,59 +184,16 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
cosign-release: "v2.5.3"
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Build variables
id: vars
shell: bash
run: |
echo "base_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant-base:${{ env.BASE_IMAGE_VERSION }}" >> "$GITHUB_OUTPUT"
echo "cache_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:latest" >> "$GITHUB_OUTPUT"
echo "created=$(date --rfc-3339=seconds --utc)" >> "$GITHUB_OUTPUT"
- name: Verify base image signature
run: |
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp "https://github.com/home-assistant/docker/.*" \
"${{ steps.vars.outputs.base_image }}"
- name: Verify cache image signature
id: cache
continue-on-error: true
run: |
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp "https://github.com/home-assistant/core/.*" \
"${{ steps.vars.outputs.cache_image }}"
# home-assistant/builder doesn't support sha pinning
- name: Build base image
id: build
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
uses: home-assistant/builder@2025.09.0
with:
context: .
file: ./Dockerfile
platforms: ${{ steps.vars.outputs.platform }}
push: true
cache-from: ${{ steps.cache.outcome == 'success' && steps.vars.outputs.cache_image || '' }}
build-args: |
BUILD_FROM=${{ steps.vars.outputs.base_image }}
tags: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
labels: |
io.hass.arch=${{ matrix.arch }}
io.hass.version=${{ needs.init.outputs.version }}
org.opencontainers.image.created=${{ steps.vars.outputs.created }}
org.opencontainers.image.version=${{ needs.init.outputs.version }}
- name: Sign image
run: |
cosign sign --yes "ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}@${{ steps.build.outputs.digest }}"
args: |
$BUILD_ARGS \
--${{ matrix.arch }} \
--cosign \
--target /data \
--generic ${{ needs.init.outputs.version }}
build_machine:
name: Build ${{ matrix.machine }} machine core image
@@ -353,6 +304,9 @@ jobs:
matrix:
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
@@ -366,94 +320,88 @@ jobs:
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Login to GitHub Container Registry
if: matrix.registry == 'ghcr.io/home-assistant'
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Verify architecture image signatures
- name: Build Meta Image
shell: bash
run: |
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
for arch in $ARCHS; do
echo "Verifying ${arch} image signature..."
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp https://github.com/home-assistant/core/.* \
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
done
echo "✓ All images verified successfully"
export DOCKER_CLI_EXPERIMENTAL=enabled
# Generate all Docker tags based on version string
# Version format: YYYY.MM.PATCH, YYYY.MM.PATCHbN (beta), or YYYY.MM.PATCH.devYYYYMMDDHHMM (dev)
# Examples:
# 2025.12.1 (stable) -> tags: 2025.12.1, 2025.12, stable, latest, beta, rc
# 2025.12.0b3 (beta) -> tags: 2025.12.0b3, beta, rc
# 2025.12.0.dev202511250240 -> tags: 2025.12.0.dev202511250240, dev
- name: Generate Docker metadata
id: meta
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ matrix.registry }}/home-assistant
sep-tags: ","
tags: |
type=raw,value=${{ needs.init.outputs.version }},priority=9999
type=raw,value=dev,enable=${{ contains(needs.init.outputs.version, 'd') }}
type=raw,value=beta,enable=${{ !contains(needs.init.outputs.version, 'd') }}
type=raw,value=rc,enable=${{ !contains(needs.init.outputs.version, 'd') }}
type=raw,value=stable,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
type=raw,value=latest,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
function create_manifest() {
local tag_l=${1}
local tag_r=${2}
local registry=${{ matrix.registry }}
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@aa33708b10e362ff993539393ff100fa93ed6a27 # v3.7.1
docker manifest create "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
"${registry}/aarch64-homeassistant:${tag_r}"
- name: Copy architecture images to DockerHub
if: matrix.registry == 'docker.io/homeassistant'
shell: bash
run: |
# Use imagetools to copy image blobs directly between registries
# This preserves provenance/attestations and seems to be much faster than pull/push
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
for arch in $ARCHS; do
echo "Copying ${arch} image to DockerHub..."
docker buildx imagetools create \
--tag "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}" \
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
done
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/amd64-homeassistant:${tag_r}" \
--os linux --arch amd64
- name: Create and push multi-arch manifests
shell: bash
run: |
# Build list of architecture images dynamically
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
ARCH_IMAGES=()
for arch in $ARCHS; do
ARCH_IMAGES+=("${{ matrix.registry }}/${arch}-homeassistant:${{ needs.init.outputs.version }}")
done
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
"${registry}/aarch64-homeassistant:${tag_r}" \
--os linux --arch arm64 --variant=v8
# Build list of all tags for single manifest creation
# Note: Using sep-tags=',' in metadata-action for easier parsing
TAG_ARGS=()
IFS=',' read -ra TAGS <<< "${{ steps.meta.outputs.tags }}"
for tag in "${TAGS[@]}"; do
TAG_ARGS+=("--tag" "${tag}")
done
docker manifest push --purge "${registry}/home-assistant:${tag_l}"
cosign sign --yes "${registry}/home-assistant:${tag_l}"
}
# Create manifest with ALL tags in a single operation (much faster!)
echo "Creating multi-arch manifest with tags: ${TAGS[*]}"
docker buildx imagetools create "${TAG_ARGS[@]}" "${ARCH_IMAGES[@]}"
function validate_image() {
local image=${1}
if ! cosign verify --certificate-oidc-issuer https://token.actions.githubusercontent.com --certificate-identity-regexp https://github.com/home-assistant/core/.* "${image}"; then
echo "Invalid signature!"
exit 1
fi
}
# Sign each tag separately (signing requires individual tag names)
echo "Signing all tags..."
for tag in "${TAGS[@]}"; do
echo "Signing ${tag}"
cosign sign --yes "${tag}"
done
function push_dockerhub() {
local image=${1}
local tag=${2}
echo "All manifests created and signed successfully"
docker tag "ghcr.io/home-assistant/${image}:${tag}" "docker.io/homeassistant/${image}:${tag}"
docker push "docker.io/homeassistant/${image}:${tag}"
cosign sign --yes "docker.io/homeassistant/${image}:${tag}"
}
# Pull images from github container registry and verify signature
docker pull "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
docker pull "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
# Upload images to dockerhub
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
fi
# Create version tag
create_manifest "${{ needs.init.outputs.version }}" "${{ needs.init.outputs.version }}"
# Create general tags
if [[ "${{ needs.init.outputs.version }}" =~ d ]]; then
create_manifest "dev" "${{ needs.init.outputs.version }}"
elif [[ "${{ needs.init.outputs.version }}" =~ b ]]; then
create_manifest "beta" "${{ needs.init.outputs.version }}"
create_manifest "rc" "${{ needs.init.outputs.version }}"
else
create_manifest "stable" "${{ needs.init.outputs.version }}"
create_manifest "latest" "${{ needs.init.outputs.version }}"
create_manifest "beta" "${{ needs.init.outputs.version }}"
create_manifest "rc" "${{ needs.init.outputs.version }}"
# Create series version tag (e.g. 2021.6)
v="${{ needs.init.outputs.version }}"
create_manifest "${v%.*}" "${{ needs.init.outputs.version }}"
fi
build_python:
name: Build PyPi package
@@ -469,7 +417,7 @@ jobs:
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
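
For reference, a condensed bash sketch of the tagging and manifest logic shown in the build workflow diff above: the version-to-tag mapping described in the comments (stable, beta, and dev version strings), the single docker buildx imagetools call that assembles the multi-arch manifest, and the per-tag cosign signing. The registry, version, and architecture values below are placeholders for illustration, not taken from the actual workflow.

#!/usr/bin/env bash
# Illustrative only: mirrors the workflow's tag rules and manifest/signing steps.
set -euo pipefail

REGISTRY="ghcr.io/home-assistant"      # placeholder registry
VERSION="2025.12.1"                    # or e.g. 2025.12.0b3 (beta), 2025.12.0.dev202511250240 (dev)
ARCHITECTURES='["amd64", "aarch64"]'   # JSON array, as emitted by the init job

# Derive tags from the version string (same contains-'d' / contains-'b' checks as the workflow)
TAGS=("${VERSION}")
if [[ "${VERSION}" =~ d ]]; then
  TAGS+=("dev")
elif [[ "${VERSION}" =~ b ]]; then
  TAGS+=("beta" "rc")
else
  TAGS+=("stable" "latest" "beta" "rc" "${VERSION%.*}")   # series tag, e.g. 2025.12
fi

# One architecture image per entry in the JSON array
ARCH_IMAGES=()
for arch in $(echo "${ARCHITECTURES}" | jq -r '.[]'); do
  ARCH_IMAGES+=("${REGISTRY}/${arch}-homeassistant:${VERSION}")
done

# Every tag becomes a --tag argument so the manifest list is created in a single call
TAG_ARGS=()
for tag in "${TAGS[@]}"; do
  TAG_ARGS+=("--tag" "${REGISTRY}/home-assistant:${tag}")
done
docker buildx imagetools create "${TAG_ARGS[@]}" "${ARCH_IMAGES[@]}"

# Signing still happens once per tag name
for tag in "${TAGS[@]}"; do
  cosign sign --yes "${REGISTRY}/home-assistant:${tag}"
done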

View File

@@ -40,7 +40,7 @@ env:
CACHE_VERSION: 2
UV_CACHE_VERSION: 1
MYPY_CACHE_VERSION: 1
HA_SHORT_VERSION: "2026.1"
HA_SHORT_VERSION: "2025.12"
DEFAULT_PYTHON: "3.13"
ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
# 10.3 is the oldest supported version
@@ -257,7 +257,7 @@ jobs:
- &setup-python-default
name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: &actions-setup-python actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true

View File

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Initialize CodeQL
uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
with:
category: "/language:python"

View File

@@ -231,7 +231,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
with:
model: openai/gpt-4o
system-prompt: |

View File

@@ -57,7 +57,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
with:
model: openai/gpt-4o-mini
system-prompt: |

View File

@@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

View File

@@ -28,6 +28,8 @@ jobs:
name: Initialize wheels builder
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
outputs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- &checkout
name: Checkout the repository
@@ -35,7 +37,7 @@ jobs:
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -48,6 +50,10 @@ jobs:
pip install "$(grep '^uv' < requirements.txt)"
uv pip install -r requirements.txt
- name: Get information
id: info
uses: home-assistant/actions/helpers/info@master
- name: Create requirements_diff file
run: |
if [[ ${{ github.event_name }} =~ (schedule|workflow_dispatch) ]]; then
@@ -108,10 +114,9 @@ jobs:
fail-fast: false
matrix: &matrix-build
abi: ["cp313", "cp314"]
arch: ["amd64", "aarch64"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
include:
- arch: amd64
os: ubuntu-latest
- os: ubuntu-latest
- arch: aarch64
os: ubuntu-24.04-arm
steps:
@@ -135,8 +140,9 @@ jobs:
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt
# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: &home-assistant-wheels home-assistant/wheels@6066c17a2a4aafcf7bdfeae01717f63adfcdba98 # 2025.11.0
uses: &home-assistant-wheels home-assistant/wheels@2025.10.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -177,6 +183,7 @@ jobs:
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt
# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: *home-assistant-wheels
with:

View File

@@ -94,7 +94,7 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(script/hassfest/(metadata|docker)\.py|homeassistant/const\.py$|pyproject\.toml)$
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
- id: hassfest-mypy-config
name: hassfest-mypy-config
entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config

View File

@@ -120,6 +120,7 @@ homeassistant.components.blueprint.*
homeassistant.components.bluesound.*
homeassistant.components.bluetooth.*
homeassistant.components.bluetooth_adapters.*
homeassistant.components.bluetooth_tracker.*
homeassistant.components.bmw_connected_drive.*
homeassistant.components.bond.*
homeassistant.components.bosch_alarm.*
@@ -187,7 +188,6 @@ homeassistant.components.elkm1.*
homeassistant.components.emulated_hue.*
homeassistant.components.energenie_power_sockets.*
homeassistant.components.energy.*
homeassistant.components.energyid.*
homeassistant.components.energyzero.*
homeassistant.components.enigma2.*
homeassistant.components.enphase_envoy.*

CODEOWNERS (generated)
View File

@@ -121,8 +121,6 @@ build.json @home-assistant/supervisor
/tests/components/androidtv/ @JeffLIrion @ollo69
/homeassistant/components/androidtv_remote/ @tronikos @Drafteed
/tests/components/androidtv_remote/ @tronikos @Drafteed
/homeassistant/components/anglian_water/ @pantherale0
/tests/components/anglian_water/ @pantherale0
/homeassistant/components/anova/ @Lash-L
/tests/components/anova/ @Lash-L
/homeassistant/components/anthemav/ @hyralex
@@ -185,8 +183,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/automation/ @home-assistant/core
/tests/components/automation/ @home-assistant/core
/homeassistant/components/avea/ @pattyland
/homeassistant/components/awair/ @ahayworth @ricohageman
/tests/components/awair/ @ahayworth @ricohageman
/homeassistant/components/awair/ @ahayworth @danielsjf
/tests/components/awair/ @ahayworth @danielsjf
/homeassistant/components/aws_s3/ @tomasbedrich
/tests/components/aws_s3/ @tomasbedrich
/homeassistant/components/axis/ @Kane610
@@ -452,8 +450,6 @@ build.json @home-assistant/supervisor
/tests/components/energenie_power_sockets/ @gnumpi
/homeassistant/components/energy/ @home-assistant/core
/tests/components/energy/ @home-assistant/core
/homeassistant/components/energyid/ @JrtPec @Molier
/tests/components/energyid/ @JrtPec @Molier
/homeassistant/components/energyzero/ @klaasnicolaas
/tests/components/energyzero/ @klaasnicolaas
/homeassistant/components/enigma2/ @autinerd
@@ -476,8 +472,6 @@ build.json @home-assistant/supervisor
/tests/components/escea/ @lazdavila
/homeassistant/components/esphome/ @jesserockz @kbx81 @bdraco
/tests/components/esphome/ @jesserockz @kbx81 @bdraco
/homeassistant/components/essent/ @jaapp
/tests/components/essent/ @jaapp
/homeassistant/components/eufylife_ble/ @bdr99
/tests/components/eufylife_ble/ @bdr99
/homeassistant/components/event/ @home-assistant/core
@@ -597,8 +591,6 @@ build.json @home-assistant/supervisor
/tests/components/goodwe/ @mletenay @starkillerOG
/homeassistant/components/google/ @allenporter
/tests/components/google/ @allenporter
/homeassistant/components/google_air_quality/ @Thomas55555
/tests/components/google_air_quality/ @Thomas55555
/homeassistant/components/google_assistant/ @home-assistant/cloud
/tests/components/google_assistant/ @home-assistant/cloud
/homeassistant/components/google_assistant_sdk/ @tronikos
@@ -708,8 +700,6 @@ build.json @home-assistant/supervisor
/tests/components/huawei_lte/ @scop @fphammerle
/homeassistant/components/hue/ @marcelveldt
/tests/components/hue/ @marcelveldt
/homeassistant/components/hue_ble/ @flip-dots
/tests/components/hue_ble/ @flip-dots
/homeassistant/components/huisbaasje/ @dennisschroer
/tests/components/huisbaasje/ @dennisschroer
/homeassistant/components/humidifier/ @home-assistant/core @Shulyaka

Dockerfile (generated)
View File

@@ -4,33 +4,32 @@
ARG BUILD_FROM
FROM ${BUILD_FROM}
LABEL \
io.hass.type="core" \
org.opencontainers.image.authors="The Home Assistant Authors" \
org.opencontainers.image.description="Open-source home automation platform running on Python 3" \
org.opencontainers.image.documentation="https://www.home-assistant.io/docs/" \
org.opencontainers.image.licenses="Apache-2.0" \
org.opencontainers.image.source="https://github.com/home-assistant/core" \
org.opencontainers.image.title="Home Assistant" \
org.opencontainers.image.url="https://www.home-assistant.io/"
# Synchronize with homeassistant/core.py:async_stop
ENV \
S6_SERVICES_GRACETIME=240000 \
UV_SYSTEM_PYTHON=true \
UV_NO_CACHE=true
ARG QEMU_CPU
# Home Assistant S6-Overlay
COPY rootfs /
# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc@sha256:baef0aa19d759fcfd31607b34ce8eaf039d496282bba57731e6ae326896d7640 /usr/local/bin/go2rtc /bin/go2rtc
# Needs to be redefined inside the FROM statement to be set for RUN commands
ARG BUILD_ARCH
# Get go2rtc binary
RUN \
case "${BUILD_ARCH}" in \
"aarch64") go2rtc_suffix='arm64' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.12/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
go2rtc --version \
# Install uv
&& pip3 install uv==0.9.6
&& go2rtc --version
# Install uv
RUN pip3 install uv==0.9.6
WORKDIR /usr/src

build.yaml (new file)
View File

@@ -0,0 +1,16 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
cosign:
base_identity: https://github.com/home-assistant/docker/.*
identity: https://github.com/home-assistant/core/.*
labels:
io.hass.type: core
org.opencontainers.image.title: Home Assistant
org.opencontainers.image.description: Open-source home automation platform running on Python 3
org.opencontainers.image.source: https://github.com/home-assistant/core
org.opencontainers.image.authors: The Home Assistant Authors
org.opencontainers.image.url: https://www.home-assistant.io/
org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
org.opencontainers.image.licenses: Apache-2.0

View File

@@ -2,7 +2,6 @@
"domain": "google",
"name": "Google",
"integrations": [
"google_air_quality",
"google_assistant",
"google_assistant_sdk",
"google_cloud",

View File

@@ -1,5 +1,5 @@
{
"domain": "philips",
"name": "Philips",
"integrations": ["dynalite", "hue", "hue_ble", "philips_js"]
"integrations": ["dynalite", "hue", "philips_js"]
}

View File

@@ -1,5 +1,5 @@
{
"domain": "raspberry_pi",
"name": "Raspberry Pi",
"integrations": ["raspberry_pi", "rpi_power", "remote_rpi_gpio"]
"integrations": ["raspberry_pi", "rpi_camera", "rpi_power", "remote_rpi_gpio"]
}

View File

@@ -75,19 +75,9 @@ class AirobotClimate(AirobotEntity, ClimateEntity):
@property
def current_temperature(self) -> float | None:
"""Return the current temperature.
If floor temperature is available, thermostat is set up for floor heating.
"""
if self._status.temp_floor is not None:
return self._status.temp_floor
"""Return the current temperature."""
return self._status.temp_air
@property
def current_humidity(self) -> float | None:
"""Return the current humidity."""
return self._status.hum_air
@property
def target_temperature(self) -> float | None:
"""Return the target temperature."""
@@ -136,13 +126,6 @@ class AirobotClimate(AirobotEntity, ClimateEntity):
await self.coordinator.async_request_refresh()
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set HVAC mode.
This thermostat only supports HEAT mode. The climate platform validates
that only supported modes are passed, so this method is a no-op.
"""
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
try:

View File

@@ -59,9 +59,7 @@ rules:
exception-translations: done
icon-translations: todo
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: This integration doesn't have any cases where raising an issue is needed.
repair-issues: todo
stale-devices:
status: exempt
comment: Single device integration, no stale device handling needed.

View File

@@ -36,28 +36,5 @@
"alarm_trigger": {
"service": "mdi:bell-ring"
}
},
"triggers": {
"armed": {
"trigger": "mdi:shield"
},
"armed_away": {
"trigger": "mdi:shield-lock"
},
"armed_home": {
"trigger": "mdi:shield-home"
},
"armed_night": {
"trigger": "mdi:shield-moon"
},
"armed_vacation": {
"trigger": "mdi:shield-airplane"
},
"disarmed": {
"trigger": "mdi:shield-off"
},
"triggered": {
"trigger": "mdi:bell-ring"
}
}
}

View File

@@ -1,8 +1,4 @@
{
"common": {
"trigger_behavior_description": "The behavior of the targeted alarms to trigger on.",
"trigger_behavior_name": "Behavior"
},
"device_automation": {
"action_type": {
"arm_away": "Arm {entity_name} away",
@@ -75,15 +71,6 @@
"message": "Arming requires a code but none was given for {entity_id}."
}
},
"selector": {
"trigger_behavior": {
"options": {
"any": "Any",
"first": "First",
"last": "Last"
}
}
},
"services": {
"alarm_arm_away": {
"description": "Arms the alarm in the away mode.",
@@ -156,84 +143,5 @@
"name": "Trigger"
}
},
"title": "Alarm control panel",
"triggers": {
"armed": {
"description": "Triggers when an alarm is armed.",
"description_configured": "[%key:component::alarm_control_panel::triggers::armed::description%]",
"fields": {
"behavior": {
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
}
},
"name": "When an alarm is armed"
},
"armed_away": {
"description": "Triggers when an alarm is armed away.",
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_away::description%]",
"fields": {
"behavior": {
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
}
},
"name": "When an alarm is armed away"
},
"armed_home": {
"description": "Triggers when an alarm is armed home.",
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_home::description%]",
"fields": {
"behavior": {
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
}
},
"name": "When an alarm is armed home"
},
"armed_night": {
"description": "Triggers when an alarm is armed night.",
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_night::description%]",
"fields": {
"behavior": {
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
}
},
"name": "When an alarm is armed night"
},
"armed_vacation": {
"description": "Triggers when an alarm is armed vacation.",
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_vacation::description%]",
"fields": {
"behavior": {
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
}
},
"name": "When an alarm is armed vacation"
},
"disarmed": {
"description": "Triggers when an alarm is disarmed.",
"description_configured": "[%key:component::alarm_control_panel::triggers::disarmed::description%]",
"fields": {
"behavior": {
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
}
},
"name": "When an alarm is disarmed"
},
"triggered": {
"description": "Triggers when an alarm is triggered.",
"description_configured": "[%key:component::alarm_control_panel::triggers::triggered::description%]",
"fields": {
"behavior": {
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
}
},
"name": "When an alarm is triggered"
}
}
"title": "Alarm control panel"
}

View File

@@ -1,99 +0,0 @@
"""Provides triggers for alarm control panels."""
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity import get_supported_features
from homeassistant.helpers.trigger import (
EntityStateTriggerBase,
Trigger,
make_conditional_entity_state_trigger,
make_entity_state_trigger,
)
from .const import DOMAIN, AlarmControlPanelEntityFeature, AlarmControlPanelState
def supports_feature(hass: HomeAssistant, entity_id: str, features: int) -> bool:
"""Get the device class of an entity or UNDEFINED if not found."""
try:
return bool(get_supported_features(hass, entity_id) & features)
except HomeAssistantError:
return False
class EntityStateTriggerRequiredFeatures(EntityStateTriggerBase):
"""Trigger for entity state changes."""
_required_features: int
def entity_filter(self, entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
entities = super().entity_filter(entities)
return {
entity_id
for entity_id in entities
if supports_feature(self._hass, entity_id, self._required_features)
}
def make_entity_state_trigger_required_features(
domain: str, to_state: str, required_features: int
) -> type[EntityStateTriggerBase]:
"""Create an entity state trigger class."""
class CustomTrigger(EntityStateTriggerRequiredFeatures):
"""Trigger for entity state changes."""
_domain = domain
_to_state = to_state
_required_features = required_features
return CustomTrigger
TRIGGERS: dict[str, type[Trigger]] = {
"armed": make_conditional_entity_state_trigger(
DOMAIN,
from_states={
AlarmControlPanelState.ARMING,
AlarmControlPanelState.DISARMED,
AlarmControlPanelState.DISARMING,
AlarmControlPanelState.PENDING,
AlarmControlPanelState.TRIGGERED,
},
to_states={
AlarmControlPanelState.ARMED_AWAY,
AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
AlarmControlPanelState.ARMED_HOME,
AlarmControlPanelState.ARMED_NIGHT,
AlarmControlPanelState.ARMED_VACATION,
},
),
"armed_away": make_entity_state_trigger_required_features(
DOMAIN,
AlarmControlPanelState.ARMED_AWAY,
AlarmControlPanelEntityFeature.ARM_AWAY,
),
"armed_home": make_entity_state_trigger_required_features(
DOMAIN,
AlarmControlPanelState.ARMED_HOME,
AlarmControlPanelEntityFeature.ARM_HOME,
),
"armed_night": make_entity_state_trigger_required_features(
DOMAIN,
AlarmControlPanelState.ARMED_NIGHT,
AlarmControlPanelEntityFeature.ARM_NIGHT,
),
"armed_vacation": make_entity_state_trigger_required_features(
DOMAIN,
AlarmControlPanelState.ARMED_VACATION,
AlarmControlPanelEntityFeature.ARM_VACATION,
),
"disarmed": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.DISARMED),
"triggered": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.TRIGGERED),
}
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for alarm control panels."""
return TRIGGERS

View File

@@ -1,53 +0,0 @@
.trigger_common: &trigger_common
target:
entity:
domain: alarm_control_panel
fields: &trigger_common_fields
behavior:
required: true
default: any
selector:
select:
options:
- first
- last
- any
translation_key: trigger_behavior
armed: *trigger_common
armed_away:
fields: *trigger_common_fields
target:
entity:
domain: alarm_control_panel
supported_features:
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_AWAY
armed_home:
fields: *trigger_common_fields
target:
entity:
domain: alarm_control_panel
supported_features:
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_HOME
armed_night:
fields: *trigger_common_fields
target:
entity:
domain: alarm_control_panel
supported_features:
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_NIGHT
armed_vacation:
fields: *trigger_common_fields
target:
entity:
domain: alarm_control_panel
supported_features:
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_VACATION
disarmed: *trigger_common
triggered: *trigger_common

View File

@@ -21,7 +21,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
SCAN_INTERVAL = 300
SCAN_INTERVAL = 30
type AmazonConfigEntry = ConfigEntry[AmazonDevicesCoordinator]
@@ -45,7 +45,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
config_entry=entry,
update_interval=timedelta(seconds=SCAN_INTERVAL),
request_refresh_debouncer=Debouncer(
hass, _LOGGER, cooldown=SCAN_INTERVAL, immediate=False
hass, _LOGGER, cooldown=30, immediate=False
),
)
self.api = AmazonEchoApi(

View File

@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==10.0.0"]
"requirements": ["aioamazondevices==9.0.2"]
}

View File

@@ -1,70 +0,0 @@
"""The Anglian Water integration."""
from __future__ import annotations
from pyanglianwater import AnglianWater
from pyanglianwater.auth import MSOB2CAuth
from pyanglianwater.exceptions import (
ExpiredAccessTokenError,
SelfAssertedError,
SmartMeterUnavailableError,
)
from homeassistant.const import (
CONF_ACCESS_TOKEN,
CONF_PASSWORD,
CONF_USERNAME,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import CONF_ACCOUNT_NUMBER, DOMAIN
from .coordinator import AnglianWaterConfigEntry, AnglianWaterUpdateCoordinator
_PLATFORMS: list[Platform] = [Platform.SENSOR]
async def async_setup_entry(
hass: HomeAssistant, entry: AnglianWaterConfigEntry
) -> bool:
"""Set up Anglian Water from a config entry."""
auth = MSOB2CAuth(
username=entry.data[CONF_USERNAME],
password=entry.data[CONF_PASSWORD],
session=async_get_clientsession(hass),
refresh_token=entry.data[CONF_ACCESS_TOKEN],
account_number=entry.data[CONF_ACCOUNT_NUMBER],
)
try:
await auth.send_refresh_request()
except (ExpiredAccessTokenError, SelfAssertedError) as err:
raise ConfigEntryAuthFailed from err
_aw = AnglianWater(authenticator=auth)
try:
await _aw.validate_smart_meter()
except SmartMeterUnavailableError as err:
raise ConfigEntryError(
translation_domain=DOMAIN, translation_key="smart_meter_unavailable"
) from err
hass.config_entries.async_update_entry(
entry, data={**entry.data, CONF_ACCESS_TOKEN: auth.refresh_token}
)
entry.runtime_data = coordinator = AnglianWaterUpdateCoordinator(
hass=hass, api=_aw, config_entry=entry
)
await coordinator.async_config_entry_first_refresh()
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
return True
async def async_unload_entry(
hass: HomeAssistant, entry: AnglianWaterConfigEntry
) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)

View File

@@ -1,103 +0,0 @@
"""Config flow for the Anglian Water integration."""
from __future__ import annotations
import logging
from typing import Any
from aiohttp import CookieJar
from pyanglianwater import AnglianWater
from pyanglianwater.auth import BaseAuth, MSOB2CAuth
from pyanglianwater.exceptions import (
InvalidAccountIdError,
SelfAssertedError,
SmartMeterUnavailableError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import selector
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from .const import CONF_ACCOUNT_NUMBER, DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): selector.TextSelector(),
vol.Required(CONF_PASSWORD): selector.TextSelector(
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
),
}
)
async def validate_credentials(auth: MSOB2CAuth) -> str | MSOB2CAuth:
"""Validate the provided credentials."""
try:
await auth.send_login_request()
except SelfAssertedError:
return "invalid_auth"
except Exception:
_LOGGER.exception("Unexpected exception")
return "unknown"
_aw = AnglianWater(authenticator=auth)
try:
await _aw.validate_smart_meter()
except (InvalidAccountIdError, SmartMeterUnavailableError):
return "smart_meter_unavailable"
return auth
class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Anglian Water."""
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
validation_response = await validate_credentials(
MSOB2CAuth(
username=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=async_create_clientsession(
self.hass,
cookie_jar=CookieJar(quote_cookie=False),
),
account_number=user_input.get(CONF_ACCOUNT_NUMBER),
)
)
if isinstance(validation_response, BaseAuth):
account_number = (
user_input.get(CONF_ACCOUNT_NUMBER)
or validation_response.account_number
)
await self.async_set_unique_id(account_number)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=account_number,
data={
**user_input,
CONF_ACCESS_TOKEN: validation_response.refresh_token,
CONF_ACCOUNT_NUMBER: account_number,
},
)
if validation_response == "smart_meter_unavailable":
return self.async_show_form(
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA.extend(
{
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
}
),
errors={"base": validation_response},
)
errors["base"] = validation_response
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)

View File

@@ -1,4 +0,0 @@
"""Constants for the Anglian Water integration."""
DOMAIN = "anglian_water"
CONF_ACCOUNT_NUMBER = "account_number"

View File

@@ -1,49 +0,0 @@
"""Anglian Water data coordinator."""
from __future__ import annotations
from datetime import timedelta
import logging
from pyanglianwater import AnglianWater
from pyanglianwater.exceptions import ExpiredAccessTokenError, UnknownEndpointError
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
type AnglianWaterConfigEntry = ConfigEntry[AnglianWaterUpdateCoordinator]
_LOGGER = logging.getLogger(__name__)
UPDATE_INTERVAL = timedelta(minutes=60)
class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
"""Anglian Water data update coordinator."""
config_entry: AnglianWaterConfigEntry
def __init__(
self,
hass: HomeAssistant,
api: AnglianWater,
config_entry: AnglianWaterConfigEntry,
) -> None:
"""Initialize update coordinator."""
super().__init__(
hass=hass,
logger=_LOGGER,
name=DOMAIN,
update_interval=UPDATE_INTERVAL,
config_entry=config_entry,
)
self.api = api
async def _async_update_data(self) -> None:
"""Update data from Anglian Water's API."""
try:
return await self.api.update()
except (ExpiredAccessTokenError, UnknownEndpointError) as err:
raise UpdateFailed from err

View File

@@ -1,44 +0,0 @@
"""Anglian Water entity."""
from __future__ import annotations
import logging
from pyanglianwater.meter import SmartMeter
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import AnglianWaterUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
class AnglianWaterEntity(CoordinatorEntity[AnglianWaterUpdateCoordinator]):
"""Defines a Anglian Water entity."""
def __init__(
self,
coordinator: AnglianWaterUpdateCoordinator,
smart_meter: SmartMeter,
) -> None:
"""Initialize Anglian Water entity."""
super().__init__(coordinator)
self.smart_meter = smart_meter
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, smart_meter.serial_number)},
name="Smart Water Meter",
manufacturer="Anglian Water",
serial_number=smart_meter.serial_number,
)
async def async_added_to_hass(self) -> None:
"""When entity is loaded."""
self.coordinator.api.updated_data_callbacks.append(self.async_write_ha_state)
await super().async_added_to_hass()
async def async_will_remove_from_hass(self) -> None:
"""When will be removed from HASS."""
self.coordinator.api.updated_data_callbacks.remove(self.async_write_ha_state)
await super().async_will_remove_from_hass()

View File

@@ -1,10 +0,0 @@
{
"domain": "anglian_water",
"name": "Anglian Water",
"codeowners": ["@pantherale0"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/anglian_water",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["pyanglianwater==2.1.0"]
}

View File

@@ -1,83 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: |
No custom actions are defined.
appropriate-polling: done
brands: done
common-modules: done
config-flow-test-coverage: done
config-flow: done
dependency-transparency: done
docs-actions:
status: exempt
comment: |
No custom actions are defined.
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup: done
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: |
No custom actions are defined.
config-entry-unloading: done
docs-configuration-parameters: done
docs-installation-parameters: done
entity-unavailable: done
integration-owner: done
log-when-unavailable: done
parallel-updates: done
reauthentication-flow: todo
test-coverage: todo
# Gold
devices: done
diagnostics: todo
discovery-update-info:
status: exempt
comment: |
Unable to discover meters.
discovery:
status: exempt
comment: |
Unable to discover meters.
docs-data-update: done
docs-examples: todo
docs-known-limitations: done
docs-supported-devices: done
docs-supported-functions: done
docs-troubleshooting: done
docs-use-cases: todo
dynamic-devices: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: |
No entities are disabled by default.
entity-translations: done
exception-translations: done
icon-translations:
status: exempt
comment: |
Entities do not require different icons.
reconfiguration-flow: todo
repair-issues:
status: exempt
comment: |
Read-only integration and no repairs are possible.
stale-devices: todo
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo

View File

@@ -1,118 +0,0 @@
"""Anglian Water sensor platform."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from enum import StrEnum
from pyanglianwater.meter import SmartMeter
from homeassistant.components.sensor import (
EntityCategory,
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import UnitOfVolume
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import AnglianWaterConfigEntry, AnglianWaterUpdateCoordinator
from .entity import AnglianWaterEntity
PARALLEL_UPDATES = 0
class AnglianWaterSensor(StrEnum):
"""Store keys for Anglian Water sensors."""
YESTERDAY_CONSUMPTION = "yesterday_consumption"
YESTERDAY_WATER_COST = "yesterday_water_cost"
YESTERDAY_SEWERAGE_COST = "yesterday_sewerage_cost"
LATEST_READING = "latest_reading"
@dataclass(frozen=True, kw_only=True)
class AnglianWaterSensorEntityDescription(SensorEntityDescription):
"""Describes AnglianWater sensor entity."""
value_fn: Callable[[SmartMeter], float]
ENTITY_DESCRIPTIONS: tuple[AnglianWaterSensorEntityDescription, ...] = (
AnglianWaterSensorEntityDescription(
key=AnglianWaterSensor.YESTERDAY_CONSUMPTION,
native_unit_of_measurement=UnitOfVolume.LITERS,
device_class=SensorDeviceClass.WATER,
value_fn=lambda entity: entity.get_yesterday_consumption,
state_class=SensorStateClass.TOTAL,
translation_key=AnglianWaterSensor.YESTERDAY_CONSUMPTION,
entity_category=EntityCategory.DIAGNOSTIC,
),
AnglianWaterSensorEntityDescription(
key=AnglianWaterSensor.LATEST_READING,
native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
device_class=SensorDeviceClass.WATER,
value_fn=lambda entity: entity.latest_read,
state_class=SensorStateClass.TOTAL_INCREASING,
translation_key=AnglianWaterSensor.LATEST_READING,
entity_category=EntityCategory.DIAGNOSTIC,
),
AnglianWaterSensorEntityDescription(
key=AnglianWaterSensor.YESTERDAY_WATER_COST,
native_unit_of_measurement="GBP",
device_class=SensorDeviceClass.MONETARY,
value_fn=lambda entity: entity.yesterday_water_cost,
translation_key=AnglianWaterSensor.YESTERDAY_WATER_COST,
entity_category=EntityCategory.DIAGNOSTIC,
),
AnglianWaterSensorEntityDescription(
key=AnglianWaterSensor.YESTERDAY_SEWERAGE_COST,
native_unit_of_measurement="GBP",
device_class=SensorDeviceClass.MONETARY,
value_fn=lambda entity: entity.yesterday_sewerage_cost,
translation_key=AnglianWaterSensor.YESTERDAY_SEWERAGE_COST,
entity_category=EntityCategory.DIAGNOSTIC,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: AnglianWaterConfigEntry,
async_add_devices: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the sensor platform."""
async_add_devices(
AnglianWaterSensorEntity(
coordinator=entry.runtime_data,
description=entity_description,
smart_meter=smart_meter,
)
for entity_description in ENTITY_DESCRIPTIONS
for smart_meter in entry.runtime_data.api.meters.values()
)
class AnglianWaterSensorEntity(AnglianWaterEntity, SensorEntity):
"""Defines a Anglian Water sensor."""
entity_description: AnglianWaterSensorEntityDescription
def __init__(
self,
coordinator: AnglianWaterUpdateCoordinator,
smart_meter: SmartMeter,
description: AnglianWaterSensorEntityDescription,
) -> None:
"""Initialize Anglian Water sensor."""
super().__init__(coordinator, smart_meter)
self.entity_description = description
self._attr_unique_id = f"{smart_meter.serial_number}_{description.key}"
@property
def native_value(self) -> float | None:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.smart_meter)

View File

@@ -1,55 +0,0 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
"smart_meter_unavailable": "This account does not have any smart meters associated with it. If this is unexpected, enter your Billing Account Number found at the top of your latest bill.",
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"user": {
"data": {
"account_number": "Billing Account Number",
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"account_number": "Your account number found on your latest bill.",
"password": "Your password",
"username": "Username or email used to login to the Anglian Water website."
},
"description": "Enter your Anglian Water account credentials to connect to Home Assistant."
}
}
},
"entity": {
"sensor": {
"latest_reading": {
"name": "Latest reading"
},
"yesterday_consumption": {
"name": "Yesterday's usage"
},
"yesterday_sewerage_cost": {
"name": "Yesterday's sewerage cost"
},
"yesterday_water_cost": {
"name": "Yesterday's water cost"
}
}
},
"exceptions": {
"auth_expired": {
"message": "Authentication token expired"
},
"service_unavailable": {
"message": "Anglian Water services are currently unavailable for maintenance."
},
"smart_meter_unavailable": {
"message": "This account no longer has a smart meter associated with it."
}
}
}

View File

@@ -17,7 +17,13 @@ from homeassistant.helpers import (
)
from homeassistant.helpers.typing import ConfigType
from .const import CONF_CHAT_MODEL, DEFAULT, DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
from .const import (
CONF_CHAT_MODEL,
DEFAULT_CONVERSATION_NAME,
DOMAIN,
LOGGER,
RECOMMENDED_CHAT_MODEL,
)
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -40,9 +46,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
# Use model from first conversation subentry for validation
subentries = list(entry.subentries.values())
if subentries:
model_id = subentries[0].data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
model_id = subentries[0].data.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
else:
model_id = DEFAULT[CONF_CHAT_MODEL]
model_id = RECOMMENDED_CHAT_MODEL
model = await client.models.retrieve(model_id=model_id, timeout=10.0)
LOGGER.debug("Anthropic model: %s", model.display_name)
except anthropic.AuthenticationError as err:

View File

@@ -6,7 +6,7 @@ from functools import partial
import json
import logging
import re
from typing import Any, cast
from typing import Any
import anthropic
import voluptuous as vol
@@ -54,11 +54,17 @@ from .const import (
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
DEFAULT,
DEFAULT_AI_TASK_NAME,
DEFAULT_CONVERSATION_NAME,
DOMAIN,
NON_THINKING_MODELS,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET,
RECOMMENDED_WEB_SEARCH,
RECOMMENDED_WEB_SEARCH_MAX_USES,
RECOMMENDED_WEB_SEARCH_USER_LOCATION,
WEB_SEARCH_UNSUPPORTED_MODELS,
)
@@ -70,13 +76,13 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
}
)
DEFAULT_CONVERSATION_OPTIONS = {
RECOMMENDED_CONVERSATION_OPTIONS = {
CONF_RECOMMENDED: True,
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
}
DEFAULT_AI_TASK_OPTIONS = {
RECOMMENDED_AI_TASK_OPTIONS = {
CONF_RECOMMENDED: True,
}
@@ -130,13 +136,13 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
subentries=[
{
"subentry_type": "conversation",
"data": DEFAULT_CONVERSATION_OPTIONS,
"data": RECOMMENDED_CONVERSATION_OPTIONS,
"title": DEFAULT_CONVERSATION_NAME,
"unique_id": None,
},
{
"subentry_type": "ai_task_data",
"data": DEFAULT_AI_TASK_OPTIONS,
"data": RECOMMENDED_AI_TASK_OPTIONS,
"title": DEFAULT_AI_TASK_NAME,
"unique_id": None,
},
@@ -174,9 +180,9 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
) -> SubentryFlowResult:
"""Add a subentry."""
if self._subentry_type == "ai_task_data":
self.options = DEFAULT_AI_TASK_OPTIONS.copy()
self.options = RECOMMENDED_AI_TASK_OPTIONS.copy()
else:
self.options = DEFAULT_CONVERSATION_OPTIONS.copy()
self.options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
return await self.async_step_init()
async def async_step_reconfigure(
@@ -277,7 +283,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
step_schema: VolDictType = {
vol.Optional(
CONF_CHAT_MODEL,
default=DEFAULT[CONF_CHAT_MODEL],
default=RECOMMENDED_CHAT_MODEL,
): SelectSelector(
SelectSelectorConfig(
options=await self._get_model_list(), custom_value=True
@@ -285,11 +291,11 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
),
vol.Optional(
CONF_MAX_TOKENS,
default=DEFAULT[CONF_MAX_TOKENS],
default=RECOMMENDED_MAX_TOKENS,
): int,
vol.Optional(
CONF_TEMPERATURE,
default=DEFAULT[CONF_TEMPERATURE],
default=RECOMMENDED_TEMPERATURE,
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
}
@@ -319,14 +325,12 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
if not model.startswith(tuple(NON_THINKING_MODELS)):
step_schema[
vol.Optional(
CONF_THINKING_BUDGET, default=DEFAULT[CONF_THINKING_BUDGET]
)
vol.Optional(CONF_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET)
] = vol.All(
NumberSelector(
NumberSelectorConfig(
min=0,
max=self.options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
)
),
vol.Coerce(int),
@@ -339,15 +343,15 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
{
vol.Optional(
CONF_WEB_SEARCH,
default=DEFAULT[CONF_WEB_SEARCH],
default=RECOMMENDED_WEB_SEARCH,
): bool,
vol.Optional(
CONF_WEB_SEARCH_MAX_USES,
default=DEFAULT[CONF_WEB_SEARCH_MAX_USES],
default=RECOMMENDED_WEB_SEARCH_MAX_USES,
): int,
vol.Optional(
CONF_WEB_SEARCH_USER_LOCATION,
default=DEFAULT[CONF_WEB_SEARCH_USER_LOCATION],
default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
): bool,
}
)
@@ -365,10 +369,9 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
user_input = {}
if user_input is not None:
if user_input.get(CONF_WEB_SEARCH, DEFAULT[CONF_WEB_SEARCH]) and not errors:
if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH) and not errors:
if user_input.get(
CONF_WEB_SEARCH_USER_LOCATION,
DEFAULT[CONF_WEB_SEARCH_USER_LOCATION],
CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
):
user_input.update(await self._get_location_data())
@@ -453,7 +456,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
}
)
response = await client.messages.create(
model=cast(str, DEFAULT[CONF_CHAT_MODEL]),
model=RECOMMENDED_CHAT_MODEL,
messages=[
{
"role": "user",
@@ -468,7 +471,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
"content": "{", # hints the model to skip any preamble
},
],
max_tokens=cast(int, DEFAULT[CONF_MAX_TOKENS]),
max_tokens=RECOMMENDED_MAX_TOKENS,
)
_LOGGER.debug("Model response: %s", response.content)
location_data = location_schema(

View File

@@ -11,29 +11,25 @@ DEFAULT_AI_TASK_NAME = "Claude AI Task"
CONF_RECOMMENDED = "recommended"
CONF_PROMPT = "prompt"
CONF_CHAT_MODEL = "chat_model"
RECOMMENDED_CHAT_MODEL = "claude-3-5-haiku-latest"
CONF_MAX_TOKENS = "max_tokens"
RECOMMENDED_MAX_TOKENS = 3000
CONF_TEMPERATURE = "temperature"
RECOMMENDED_TEMPERATURE = 1.0
CONF_THINKING_BUDGET = "thinking_budget"
RECOMMENDED_THINKING_BUDGET = 0
MIN_THINKING_BUDGET = 1024
CONF_WEB_SEARCH = "web_search"
RECOMMENDED_WEB_SEARCH = False
CONF_WEB_SEARCH_USER_LOCATION = "user_location"
RECOMMENDED_WEB_SEARCH_USER_LOCATION = False
CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
RECOMMENDED_WEB_SEARCH_MAX_USES = 5
CONF_WEB_SEARCH_CITY = "city"
CONF_WEB_SEARCH_REGION = "region"
CONF_WEB_SEARCH_COUNTRY = "country"
CONF_WEB_SEARCH_TIMEZONE = "timezone"
DEFAULT = {
CONF_CHAT_MODEL: "claude-3-5-haiku-latest",
CONF_MAX_TOKENS: 3000,
CONF_TEMPERATURE: 1.0,
CONF_THINKING_BUDGET: 0,
CONF_WEB_SEARCH: False,
CONF_WEB_SEARCH_USER_LOCATION: False,
CONF_WEB_SEARCH_MAX_USES: 5,
}
MIN_THINKING_BUDGET = 1024
NON_THINKING_MODELS = [
"claude-3-5", # Both sonnet and haiku
"claude-3-opus",

View File

@@ -84,11 +84,14 @@ from .const import (
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
DEFAULT,
DOMAIN,
LOGGER,
MIN_THINKING_BUDGET,
NON_THINKING_MODELS,
RECOMMENDED_CHAT_MODEL,
RECOMMENDED_MAX_TOKENS,
RECOMMENDED_TEMPERATURE,
RECOMMENDED_THINKING_BUDGET,
)
# Max number of back and forth with the LLM to generate a response
@@ -601,19 +604,17 @@ class AnthropicBaseLLMEntity(Entity):
raise TypeError("First message must be a system message")
messages = _convert_content(chat_log.content[1:])
model = options.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
model_args = MessageCreateParamsStreaming(
model=model,
messages=messages,
max_tokens=options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
system=system.content,
stream=True,
)
thinking_budget = options.get(
CONF_THINKING_BUDGET, DEFAULT[CONF_THINKING_BUDGET]
)
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
if (
not model.startswith(tuple(NON_THINKING_MODELS))
and thinking_budget >= MIN_THINKING_BUDGET
@@ -624,7 +625,7 @@ class AnthropicBaseLLMEntity(Entity):
else:
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
model_args["temperature"] = options.get(
CONF_TEMPERATURE, DEFAULT[CONF_TEMPERATURE]
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
)
tools: list[ToolUnionParam] = []

View File

@@ -14,19 +14,5 @@
"start_conversation": {
"service": "mdi:forum"
}
},
"triggers": {
"idle": {
"trigger": "mdi:chat-sleep"
},
"listening": {
"trigger": "mdi:chat-question"
},
"processing": {
"trigger": "mdi:chat-processing"
},
"responding": {
"trigger": "mdi:chat-alert"
}
}
}

View File

@@ -1,8 +1,4 @@
{
"common": {
"trigger_behavior_description": "The behavior of the targeted Assist satellites to trigger on.",
"trigger_behavior_name": "Behavior"
},
"entity_component": {
"_": {
"name": "Assist satellite",
@@ -20,13 +16,6 @@
"id": "Answer ID",
"sentences": "Sentences"
}
},
"trigger_behavior": {
"options": {
"any": "Any",
"first": "First",
"last": "Last"
}
}
},
"services": {
@@ -109,51 +98,5 @@
"name": "Start conversation"
}
},
"title": "Assist satellite",
"triggers": {
"idle": {
"description": "Triggers when an Assist satellite becomes idle.",
"description_configured": "[%key:component::assist_satellite::triggers::idle::description%]",
"fields": {
"behavior": {
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
}
},
"name": "When an Assist satellite becomes idle"
},
"listening": {
"description": "Triggers when an Assist satellite starts listening.",
"description_configured": "[%key:component::assist_satellite::triggers::listening::description%]",
"fields": {
"behavior": {
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
}
},
"name": "When an Assist satellite starts listening"
},
"processing": {
"description": "Triggers when an Assist satellite is processing.",
"description_configured": "[%key:component::assist_satellite::triggers::processing::description%]",
"fields": {
"behavior": {
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
}
},
"name": "When an Assist satellite is processing"
},
"responding": {
"description": "Triggers when an Assist satellite is responding.",
"description_configured": "[%key:component::assist_satellite::triggers::responding::description%]",
"fields": {
"behavior": {
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
}
},
"name": "When an Assist satellite is responding"
}
}
"title": "Assist satellite"
}

View File

@@ -1,19 +0,0 @@
"""Provides triggers for assist satellites."""
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger
from .const import DOMAIN
from .entity import AssistSatelliteState
TRIGGERS: dict[str, type[Trigger]] = {
"idle": make_entity_state_trigger(DOMAIN, AssistSatelliteState.IDLE),
"listening": make_entity_state_trigger(DOMAIN, AssistSatelliteState.LISTENING),
"processing": make_entity_state_trigger(DOMAIN, AssistSatelliteState.PROCESSING),
"responding": make_entity_state_trigger(DOMAIN, AssistSatelliteState.RESPONDING),
}
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for assist satellites."""
return TRIGGERS
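# Hypothetical alternative (not part of this diff), assuming AssistSatelliteState
# contains exactly the four states listed above: the same mapping could be built
# directly from the enum.
TRIGGERS_FROM_ENUM: dict[str, type[Trigger]] = {
    state.value: make_entity_state_trigger(DOMAIN, state)
    for state in AssistSatelliteState
}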

View File

@@ -1,20 +0,0 @@
.trigger_common: &trigger_common
target:
entity:
domain: assist_satellite
fields:
behavior:
required: true
default: any
selector:
select:
options:
- first
- last
- any
translation_key: trigger_behavior
idle: *trigger_common
listening: *trigger_common
processing: *trigger_common
responding: *trigger_common

View File

@@ -29,5 +29,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.1"]
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.1.2"]
}

View File

@@ -12,9 +12,8 @@ from typing import Any, Protocol, cast
from propcache.api import cached_property
import voluptuous as vol
from homeassistant.components import labs, websocket_api
from homeassistant.components import websocket_api
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
from homeassistant.components.labs import async_listen as async_labs_listen
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_MODE,
@@ -115,51 +114,6 @@ ATTR_SOURCE = "source"
ATTR_VARIABLES = "variables"
SERVICE_TRIGGER = "trigger"
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG = "new_triggers_conditions"
_EXPERIMENTAL_CONDITION_PLATFORMS = {
"light",
}
_EXPERIMENTAL_TRIGGER_PLATFORMS = {
"alarm_control_panel",
"assist_satellite",
"climate",
"cover",
"fan",
"lawn_mower",
"light",
"media_player",
"text",
"vacuum",
}
@callback
def is_disabled_experimental_condition(hass: HomeAssistant, platform: str) -> bool:
"""Check if the platform is a disabled experimental condition platform."""
return (
platform in _EXPERIMENTAL_CONDITION_PLATFORMS
and not labs.async_is_preview_feature_enabled(
hass,
DOMAIN,
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
)
)
@callback
def is_disabled_experimental_trigger(hass: HomeAssistant, platform: str) -> bool:
"""Check if the platform is a disabled experimental trigger platform."""
return (
platform in _EXPERIMENTAL_TRIGGER_PLATFORMS
and not labs.async_is_preview_feature_enabled(
hass,
DOMAIN,
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
)
)
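# Hypothetical usage sketch (not part of this diff): callers could filter a set of
# platforms with the helper above before wiring their triggers.
@callback
def allowed_trigger_platforms(hass: HomeAssistant, platforms: set[str]) -> set[str]:
    """Return only trigger platforms not gated behind the disabled preview feature."""
    return {
        platform
        for platform in platforms
        if not is_disabled_experimental_trigger(hass, platform)
    }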
class IfAction(Protocol):
"""Define the format of if_action."""
@@ -363,20 +317,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
schema=vol.Schema({vol.Optional(CONF_ID): str}),
)
@callback
def new_triggers_conditions_listener() -> None:
"""Handle new_triggers_conditions flag change."""
hass.async_create_task(
reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
)
async_labs_listen(
hass,
DOMAIN,
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
new_triggers_conditions_listener,
)
websocket_api.async_register_command(hass, websocket_config)
return True

View File

@@ -6,10 +6,5 @@
"dependencies": ["blueprint", "trace"],
"documentation": "https://www.home-assistant.io/integrations/automation",
"integration_type": "system",
"preview_features": {
"new_triggers_conditions": {
"report_issue_url": "https://github.com/home-assistant/core/issues/new?template=bug_report.yml&integration_link=https://www.home-assistant.io/integrations/automation&integration_name=Automation"
}
},
"quality_scale": "internal"
}

View File

@@ -67,14 +67,6 @@
"title": "[%key:component::automation::common::validation_failed_title%]"
}
},
"preview_features": {
"new_triggers_conditions": {
"description": "Enables new intuitive triggers and conditions that are more user-friendly than technical state-based options.\n\nThese new automation features support targets across your entire home, letting you trigger automations for any entity, device, area, floor, or label (for example, when any light in your living room turned on). Integrations can now also provide their own intuitive triggers and conditions, just like actions.\n\nThis preview also includes a new tree view to help you navigate your home when adding triggers, conditions, and actions.",
"disable_confirmation": "Disabling this preview will cause automations and scripts that use the new intuitive triggers and conditions to fail.\n\nBefore disabling, ensure that your automations or scripts do not rely on this feature.",
"enable_confirmation": "This feature is still in development and may change. These new intuitive triggers and conditions are being refined based on user feedback and are not yet complete.\n\nBy enabling this preview, you'll have early access to these new capabilities, but be aware that they may be modified or updated in future releases.",
"name": "Intuitive triggers and conditions"
}
},
"services": {
"reload": {
"description": "Reloads the automation configuration.",

View File

@@ -1,7 +1,7 @@
{
"domain": "awair",
"name": "Awair",
"codeowners": ["@ahayworth", "@ricohageman"],
"codeowners": ["@ahayworth", "@danielsjf"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/awair",
"iot_class": "local_polling",

View File

@@ -1,98 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: No actions defined
appropriate-polling:
status: done
comment: |
We fetch both user and devices; we could probably slow one of them down
brands: done
common-modules: done
config-flow:
status: todo
comment: |
data_description fields are missing
Should not abort in the cloud step on anything other than an invalid access token
Find out why the access token is optional
Discovered devices step is redundant
config-flow-test-coverage:
status: todo
comment: |
Move happy flow to the top and merge with `test_show_form`
Reuse `result`
Cloud tests should initialize with data directly
Tests should finish in CREATE_ENTRY
dependency-transparency:
status: todo
comment: |
Dependency is not built in the CI
docs-actions: todo
docs-high-level-description: done
docs-installation-instructions: done
docs-removal-instructions: done
entity-event-setup:
status: exempt
comment: No explicit event subscription
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure: done
test-before-setup: done
unique-config-entry: done
# Silver
action-exceptions:
status: exempt
comment: No actions defined
config-entry-unloading: done
docs-configuration-parameters: todo
docs-installation-parameters: todo
entity-unavailable: todo
integration-owner: done
log-when-unavailable: done
parallel-updates: todo
reauthentication-flow: done
test-coverage:
status: todo
comment: |
Patch objects where we use them
Use test helpers to load JSON
Fix the typo `no_devicess_fixture`
Make common config entries for cloud and local
Test setup of the integration
# Gold
devices:
status: done
comment: |
Can move to shorthand attribute
Can remove typecast
diagnostics: todo
discovery: done
discovery-update-info: done
docs-data-update: done
docs-examples: todo
docs-known-limitations: todo
docs-supported-devices: todo
docs-supported-functions: todo
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-device-class:
status: done
comment: |
Can remove rounding
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: done
reconfiguration-flow: todo
repair-issues: todo
stale-devices: todo
# Platinum
async-dependency: todo
inject-websession: done
strict-typing: todo

View File

@@ -21,10 +21,10 @@ from .const import (
ATTR_ITEM_NUMBER,
ATTR_SERIAL_NUMBER,
ATTR_TYPE_NUMBER,
COMPATIBLE_MODELS,
CONF_SERIAL_NUMBER,
DEFAULT_MODEL,
DOMAIN,
SELECTABLE_MODELS,
)
from .util import get_serial_number_from_jid
@@ -70,7 +70,7 @@ class BangOlufsenConfigFlowHandler(ConfigFlow, domain=DOMAIN):
{
vol.Required(CONF_HOST): str,
vol.Required(CONF_MODEL, default=DEFAULT_MODEL): SelectSelector(
SelectSelectorConfig(options=SELECTABLE_MODELS)
SelectSelectorConfig(options=COMPATIBLE_MODELS)
),
}
)

View File

@@ -62,7 +62,6 @@ class BangOlufsenMediaType(StrEnum):
class BangOlufsenModel(StrEnum):
"""Enum for compatible model names."""
# Mozart devices
BEOCONNECT_CORE = "Beoconnect Core"
BEOLAB_8 = "BeoLab 8"
BEOLAB_28 = "BeoLab 28"
@@ -74,8 +73,6 @@ class BangOlufsenModel(StrEnum):
BEOSOUND_LEVEL = "Beosound Level"
BEOSOUND_PREMIERE = "Beosound Premiere"
BEOSOUND_THEATRE = "Beosound Theatre"
# Remote devices
BEOREMOTE_ONE = "Beoremote One"
# Physical "buttons" on devices
@@ -99,7 +96,6 @@ class WebsocketNotification(StrEnum):
"""Enum for WebSocket notification types."""
ACTIVE_LISTENING_MODE = "active_listening_mode"
BEO_REMOTE_BUTTON = "beo_remote_button"
BUTTON = "button"
PLAYBACK_ERROR = "playback_error"
PLAYBACK_METADATA = "playback_metadata"
@@ -117,7 +113,6 @@ class WebsocketNotification(StrEnum):
BEOLINK_AVAILABLE_LISTENERS = "beolinkAvailableListeners"
CONFIGURATION = "configuration"
NOTIFICATION = "notification"
REMOTE_CONTROL_DEVICES = "remoteControlDevices"
REMOTE_MENU_CHANGED = "remoteMenuChanged"
ALL = "all"
@@ -133,11 +128,7 @@ CONF_SERIAL_NUMBER: Final = "serial_number"
CONF_BEOLINK_JID: Final = "jid"
# Models to choose from in manual configuration.
SELECTABLE_MODELS: list[str] = [
model.value for model in BangOlufsenModel if model != BangOlufsenModel.BEOREMOTE_ONE
]
MANUFACTURER: Final[str] = "Bang & Olufsen"
COMPATIBLE_MODELS: list[str] = [x.value for x in BangOlufsenModel]
# Attribute names for zeroconf discovery.
ATTR_TYPE_NUMBER: Final[str] = "tn"
@@ -236,10 +227,6 @@ BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event"
# Dict used to translate native Bang & Olufsen event names to string.json compatible ones
EVENT_TRANSLATION_MAP: dict[str, str] = {
# Beoremote One
"KeyPress": "key_press",
"KeyRelease": "key_release",
# Physical "buttons"
"shortPress (Release)": "short_press_release",
"longPress (Timeout)": "long_press_timeout",
"longPress (Release)": "long_press_release",
@@ -260,70 +247,6 @@ DEVICE_BUTTON_EVENTS: Final[list[str]] = [
"very_long_press_release",
]
BEO_REMOTE_SUBMENU_CONTROL: Final[str] = "Control"
BEO_REMOTE_SUBMENU_LIGHT: Final[str] = "Light"
# Common for both submenus
BEO_REMOTE_KEYS: Final[tuple[str, ...]] = (
"Blue",
"Digit0",
"Digit1",
"Digit2",
"Digit3",
"Digit4",
"Digit5",
"Digit6",
"Digit7",
"Digit8",
"Digit9",
"Down",
"Green",
"Left",
"Play",
"Red",
"Rewind",
"Right",
"Select",
"Stop",
"Up",
"Wind",
"Yellow",
"Func1",
"Func2",
"Func3",
"Func4",
"Func5",
"Func6",
"Func7",
"Func8",
"Func9",
"Func10",
"Func11",
"Func12",
"Func13",
"Func14",
"Func15",
"Func16",
"Func17",
)
# "keys" that are unique to the Control submenu
BEO_REMOTE_CONTROL_KEYS: Final[tuple[str, ...]] = (
"Func18",
"Func19",
"Func20",
"Func21",
"Func22",
"Func23",
"Func24",
"Func25",
"Func26",
"Func27",
)
BEO_REMOTE_KEY_EVENTS: Final[list[str]] = ["key_press", "key_release"]
# Beolink Converter NL/ML sources need to be transformed to upper case
BEOLINK_JOIN_SOURCES_TO_UPPER = (
"aux_a",

View File

@@ -2,34 +2,16 @@
from __future__ import annotations
from typing import TYPE_CHECKING
from mozart_api.models import PairedRemote
from homeassistant.components.event import EventDeviceClass, EventEntity
from homeassistant.const import CONF_MODEL
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import BangOlufsenConfigEntry
from .const import (
BEO_REMOTE_CONTROL_KEYS,
BEO_REMOTE_KEY_EVENTS,
BEO_REMOTE_KEYS,
BEO_REMOTE_SUBMENU_CONTROL,
BEO_REMOTE_SUBMENU_LIGHT,
CONNECTION_STATUS,
DEVICE_BUTTON_EVENTS,
DOMAIN,
MANUFACTURER,
BangOlufsenModel,
WebsocketNotification,
)
from .const import CONNECTION_STATUS, DEVICE_BUTTON_EVENTS, WebsocketNotification
from .entity import BangOlufsenEntity
from .util import get_device_buttons, get_remotes
from .util import get_device_buttons
PARALLEL_UPDATES = 0
@@ -39,87 +21,24 @@ async def async_setup_entry(
config_entry: BangOlufsenConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Event entities from config entry."""
entities: list[BangOlufsenEvent] = []
"""Set up Sensor entities from config entry."""
async_add_entities(
BangOlufsenButtonEvent(config_entry, button_type)
for button_type in get_device_buttons(config_entry.data[CONF_MODEL])
)
# Check for connected Beoremote One
remotes = await get_remotes(config_entry.runtime_data.client)
for remote in remotes:
# Add Light keys
entities.extend(
[
BangOlufsenRemoteKeyEvent(
config_entry,
remote,
f"{BEO_REMOTE_SUBMENU_LIGHT}/{key_type}",
)
for key_type in BEO_REMOTE_KEYS
]
)
# Add Control keys
entities.extend(
[
BangOlufsenRemoteKeyEvent(
config_entry,
remote,
f"{BEO_REMOTE_SUBMENU_CONTROL}/{key_type}",
)
for key_type in (*BEO_REMOTE_KEYS, *BEO_REMOTE_CONTROL_KEYS)
]
)
# If the remote is no longer available, then delete the device.
# The remote may still appear as available to the device after it has been unpaired on the remote,
# as it also has to be removed from the device in the app.
device_registry = dr.async_get(hass)
devices = device_registry.devices.get_devices_for_config_entry_id(
config_entry.entry_id
)
for device in devices:
if (
device.model == BangOlufsenModel.BEOREMOTE_ONE
and device.serial_number not in {remote.serial_number for remote in remotes}
):
device_registry.async_update_device(
device.id, remove_config_entry_id=config_entry.entry_id
)
async_add_entities(new_entities=entities)
class BangOlufsenEvent(BangOlufsenEntity, EventEntity):
"""Base Event class."""
class BangOlufsenButtonEvent(BangOlufsenEntity, EventEntity):
"""Event class for Button events."""
_attr_device_class = EventDeviceClass.BUTTON
_attr_entity_registry_enabled_default = False
def __init__(self, config_entry: BangOlufsenConfigEntry) -> None:
"""Initialize Event."""
super().__init__(config_entry, config_entry.runtime_data.client)
@callback
def _async_handle_event(self, event: str) -> None:
"""Handle event."""
self._trigger_event(event)
self.async_write_ha_state()
class BangOlufsenButtonEvent(BangOlufsenEvent):
"""Event class for Button events."""
_attr_event_types = DEVICE_BUTTON_EVENTS
def __init__(self, config_entry: BangOlufsenConfigEntry, button_type: str) -> None:
"""Initialize Button."""
super().__init__(config_entry)
super().__init__(config_entry, config_entry.runtime_data.client)
self._attr_unique_id = f"{self._unique_id}_{button_type}"
@@ -133,65 +52,20 @@ class BangOlufsenButtonEvent(BangOlufsenEvent):
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
f"{self._unique_id}_{CONNECTION_STATUS}",
self._async_update_connection_state,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
self._async_handle_event,
)
)
class BangOlufsenRemoteKeyEvent(BangOlufsenEvent):
"""Event class for Beoremote One key events."""
_attr_event_types = BEO_REMOTE_KEY_EVENTS
def __init__(
self,
config_entry: BangOlufsenConfigEntry,
remote: PairedRemote,
key_type: str,
) -> None:
"""Initialize Beoremote One key."""
super().__init__(config_entry)
if TYPE_CHECKING:
assert remote.serial_number
self._attr_unique_id = f"{remote.serial_number}_{self._unique_id}_{key_type}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{remote.serial_number}_{self._unique_id}")},
name=f"{BangOlufsenModel.BEOREMOTE_ONE}-{remote.serial_number}-{self._unique_id}",
model=BangOlufsenModel.BEOREMOTE_ONE,
serial_number=remote.serial_number,
sw_version=remote.app_version,
manufacturer=MANUFACTURER,
via_device=(DOMAIN, self._unique_id),
)
# Make the native key name Home Assistant compatible
self._attr_translation_key = key_type.lower().replace("/", "_")
self._key_type = key_type
async def async_added_to_hass(self) -> None:
"""Listen to WebSocket Beoremote One key events."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
self._async_update_connection_state,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEO_REMOTE_BUTTON}_{self._key_type}",
self._async_handle_event,
)
)
@callback
def _async_handle_event(self, event: str) -> None:
"""Handle event."""
self._trigger_event(event)
self.async_write_ha_state()

View File

@@ -1,278 +1,4 @@
{
"entity": {
"event": {
"control_blue": {
"default": "mdi:remote"
},
"control_digit0": {
"default": "mdi:remote"
},
"control_digit1": {
"default": "mdi:remote"
},
"control_digit2": {
"default": "mdi:remote"
},
"control_digit3": {
"default": "mdi:remote"
},
"control_digit4": {
"default": "mdi:remote"
},
"control_digit5": {
"default": "mdi:remote"
},
"control_digit6": {
"default": "mdi:remote"
},
"control_digit7": {
"default": "mdi:remote"
},
"control_digit8": {
"default": "mdi:remote"
},
"control_digit9": {
"default": "mdi:remote"
},
"control_down": {
"default": "mdi:remote"
},
"control_func1": {
"default": "mdi:remote"
},
"control_func10": {
"default": "mdi:remote"
},
"control_func11": {
"default": "mdi:remote"
},
"control_func12": {
"default": "mdi:remote"
},
"control_func13": {
"default": "mdi:remote"
},
"control_func14": {
"default": "mdi:remote"
},
"control_func15": {
"default": "mdi:remote"
},
"control_func16": {
"default": "mdi:remote"
},
"control_func17": {
"default": "mdi:remote"
},
"control_func18": {
"default": "mdi:remote"
},
"control_func19": {
"default": "mdi:remote"
},
"control_func2": {
"default": "mdi:remote"
},
"control_func20": {
"default": "mdi:remote"
},
"control_func21": {
"default": "mdi:remote"
},
"control_func22": {
"default": "mdi:remote"
},
"control_func23": {
"default": "mdi:remote"
},
"control_func24": {
"default": "mdi:remote"
},
"control_func25": {
"default": "mdi:remote"
},
"control_func26": {
"default": "mdi:remote"
},
"control_func27": {
"default": "mdi:remote"
},
"control_func3": {
"default": "mdi:remote"
},
"control_func4": {
"default": "mdi:remote"
},
"control_func5": {
"default": "mdi:remote"
},
"control_func6": {
"default": "mdi:remote"
},
"control_func7": {
"default": "mdi:remote"
},
"control_func8": {
"default": "mdi:remote"
},
"control_func9": {
"default": "mdi:remote"
},
"control_green": {
"default": "mdi:remote"
},
"control_left": {
"default": "mdi:remote"
},
"control_play": {
"default": "mdi:remote"
},
"control_red": {
"default": "mdi:remote"
},
"control_rewind": {
"default": "mdi:remote"
},
"control_right": {
"default": "mdi:remote"
},
"control_select": {
"default": "mdi:remote"
},
"control_stop": {
"default": "mdi:remote"
},
"control_up": {
"default": "mdi:remote"
},
"control_wind": {
"default": "mdi:remote"
},
"control_yellow": {
"default": "mdi:remote"
},
"light_blue": {
"default": "mdi:remote"
},
"light_digit0": {
"default": "mdi:remote"
},
"light_digit1": {
"default": "mdi:remote"
},
"light_digit2": {
"default": "mdi:remote"
},
"light_digit3": {
"default": "mdi:remote"
},
"light_digit4": {
"default": "mdi:remote"
},
"light_digit5": {
"default": "mdi:remote"
},
"light_digit6": {
"default": "mdi:remote"
},
"light_digit7": {
"default": "mdi:remote"
},
"light_digit8": {
"default": "mdi:remote"
},
"light_digit9": {
"default": "mdi:remote"
},
"light_down": {
"default": "mdi:remote"
},
"light_func1": {
"default": "mdi:remote"
},
"light_func10": {
"default": "mdi:remote"
},
"light_func11": {
"default": "mdi:remote"
},
"light_func12": {
"default": "mdi:remote"
},
"light_func13": {
"default": "mdi:remote"
},
"light_func14": {
"default": "mdi:remote"
},
"light_func15": {
"default": "mdi:remote"
},
"light_func16": {
"default": "mdi:remote"
},
"light_func17": {
"default": "mdi:remote"
},
"light_func2": {
"default": "mdi:remote"
},
"light_func3": {
"default": "mdi:remote"
},
"light_func4": {
"default": "mdi:remote"
},
"light_func5": {
"default": "mdi:remote"
},
"light_func6": {
"default": "mdi:remote"
},
"light_func7": {
"default": "mdi:remote"
},
"light_func8": {
"default": "mdi:remote"
},
"light_func9": {
"default": "mdi:remote"
},
"light_green": {
"default": "mdi:remote"
},
"light_left": {
"default": "mdi:remote"
},
"light_play": {
"default": "mdi:remote"
},
"light_red": {
"default": "mdi:remote"
},
"light_rewind": {
"default": "mdi:remote"
},
"light_right": {
"default": "mdi:remote"
},
"light_select": {
"default": "mdi:remote"
},
"light_stop": {
"default": "mdi:remote"
},
"light_up": {
"default": "mdi:remote"
},
"light_wind": {
"default": "mdi:remote"
},
"light_yellow": {
"default": "mdi:remote"
}
}
},
"services": {
"beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" },
"beolink_expand": { "service": "mdi:location-enter" },

View File

@@ -80,7 +80,6 @@ from .const import (
CONNECTION_STATUS,
DOMAIN,
FALLBACK_SOURCES,
MANUFACTURER,
VALID_MEDIA_TYPES,
BangOlufsenMediaType,
BangOlufsenSource,
@@ -202,7 +201,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
self._attr_device_info = DeviceInfo(
configuration_url=f"http://{self._host}/#/",
identifiers={(DOMAIN, self._unique_id)},
manufacturer=MANUFACTURER,
manufacturer="Bang & Olufsen",
model=self._model,
serial_number=self._unique_id,
)
@@ -250,7 +249,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{DOMAIN}_{self._unique_id}_{signal}",
f"{self._unique_id}_{signal}",
signal_handler,
)
)

File diff suppressed because it is too large

View File

@@ -2,11 +2,6 @@
from __future__ import annotations
from typing import cast
from mozart_api.models import PairedRemote
from mozart_api.mozart_client import MozartClient
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceEntry
@@ -28,18 +23,6 @@ def get_serial_number_from_jid(jid: str) -> str:
return jid.split(".")[2].split("@")[0]
async def get_remotes(client: MozartClient) -> list[PairedRemote]:
"""Get paired remotes."""
bluetooth_remote_list = await client.get_bluetooth_remotes()
return [
remote
for remote in cast(list[PairedRemote], bluetooth_remote_list.items)
if remote.serial_number is not None
]
def get_device_buttons(model: BangOlufsenModel) -> list[str]:
"""Get supported buttons for a given model."""
buttons = DEVICE_BUTTONS.copy()

View File

@@ -6,7 +6,6 @@ import logging
from typing import TYPE_CHECKING
from mozart_api.models import (
BeoRemoteButton,
ButtonEvent,
ListeningModeProps,
PlaybackContentMetadata,
@@ -29,13 +28,11 @@ from homeassistant.util.enum import try_parse_enum
from .const import (
BANG_OLUFSEN_WEBSOCKET_EVENT,
CONNECTION_STATUS,
DOMAIN,
EVENT_TRANSLATION_MAP,
BangOlufsenModel,
WebsocketNotification,
)
from .entity import BangOlufsenBase
from .util import get_device, get_remotes
from .util import get_device
_LOGGER = logging.getLogger(__name__)
@@ -60,9 +57,6 @@ class BangOlufsenWebsocket(BangOlufsenBase):
self._client.get_active_listening_mode_notifications(
self.on_active_listening_mode
)
self._client.get_beo_remote_button_notifications(
self.on_beo_remote_button_notification
)
self._client.get_button_notifications(self.on_button_notification)
self._client.get_playback_error_notifications(
@@ -93,7 +87,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Update all entities of the connection status."""
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
f"{self._unique_id}_{CONNECTION_STATUS}",
self._client.websocket_connected,
)
@@ -111,22 +105,10 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send active_listening_mode dispatch."""
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.ACTIVE_LISTENING_MODE}",
f"{self._unique_id}_{WebsocketNotification.ACTIVE_LISTENING_MODE}",
notification,
)
def on_beo_remote_button_notification(self, notification: BeoRemoteButton) -> None:
"""Send beo_remote_button dispatch."""
if TYPE_CHECKING:
assert notification.type
# Send to event entity
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEO_REMOTE_BUTTON}_{notification.key}",
EVENT_TRANSLATION_MAP[notification.type],
)
def on_button_notification(self, notification: ButtonEvent) -> None:
"""Send button dispatch."""
# State is expected to always be available.
@@ -136,11 +118,11 @@ class BangOlufsenWebsocket(BangOlufsenBase):
# Send to event entity
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
EVENT_TRANSLATION_MAP[notification.state],
)
async def on_notification_notification(
def on_notification_notification(
self, notification: WebsocketNotificationTag
) -> None:
"""Send notification dispatch."""
@@ -154,51 +136,24 @@ class BangOlufsenWebsocket(BangOlufsenBase):
):
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEOLINK}",
f"{self._unique_id}_{WebsocketNotification.BEOLINK}",
)
elif notification_type is WebsocketNotification.CONFIGURATION:
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
)
elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED:
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}",
f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}",
)
# This notification is triggered when a remote is paired with, unpaired from or connects to a device,
# so the current remote devices have to be compared to the available remotes to determine the action
elif notification_type is WebsocketNotification.REMOTE_CONTROL_DEVICES:
device_registry = dr.async_get(self.hass)
# Get remote devices connected to the device from Home Assistant
device_serial_numbers = [
device.serial_number
for device in device_registry.devices.get_devices_for_config_entry_id(
self.entry.entry_id
)
if device.serial_number is not None
and device.model == BangOlufsenModel.BEOREMOTE_ONE
]
# Get paired remotes from device
remote_serial_numbers = [
remote.serial_number
for remote in await get_remotes(self._client)
if remote.serial_number is not None
]
# Check if the number of remote devices corresponds to the number of paired remotes
if len(remote_serial_numbers) != len(device_serial_numbers):
_LOGGER.info(
"A Beoremote One has been paired or unpaired to %s. Reloading config entry to add device and entities",
self.entry.title,
)
self.hass.config_entries.async_schedule_reload(self.entry.entry_id)
def on_playback_error_notification(self, notification: PlaybackError) -> None:
"""Send playback_error dispatch."""
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_ERROR}",
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_ERROR}",
notification,
)
@@ -208,7 +163,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send playback_metadata dispatch."""
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_METADATA}",
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_METADATA}",
notification,
)
@@ -216,7 +171,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send playback_progress dispatch."""
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_PROGRESS}",
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_PROGRESS}",
notification,
)
@@ -224,7 +179,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send playback_state dispatch."""
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_STATE}",
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_STATE}",
notification,
)
@@ -232,7 +187,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send playback_source dispatch."""
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
notification,
)
@@ -240,7 +195,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send source_change dispatch."""
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.SOURCE_CHANGE}",
f"{self._unique_id}_{WebsocketNotification.SOURCE_CHANGE}",
notification,
)
@@ -248,7 +203,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send volume dispatch."""
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.VOLUME}",
f"{self._unique_id}_{WebsocketNotification.VOLUME}",
notification,
)
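# Hypothetical sketch (not part of this diff): the repeated dispatcher signal
# f-strings above could be centralised so senders and entity listeners cannot drift.
def signal_name(unique_id: str, notification: str, domain: str | None = None) -> str:
    """Build a dispatcher signal name, optionally namespaced by the integration domain."""
    prefix = f"{domain}_" if domain else ""
    return f"{prefix}{unique_id}_{notification}"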

View File

@@ -20,7 +20,7 @@
"bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.4",
"dbus-fast==3.1.2",
"dbus-fast==3.0.0",
"habluetooth==5.7.0"
]
}

View File

@@ -0,0 +1 @@
"""The bluetooth_tracker component."""

View File

@@ -0,0 +1,10 @@
"""Constants for the Bluetooth Tracker component."""
from typing import Final
DOMAIN: Final = "bluetooth_tracker"
SERVICE_UPDATE: Final = "update"
BT_PREFIX: Final = "BT_"
CONF_REQUEST_RSSI: Final = "request_rssi"
DEFAULT_DEVICE_ID: Final = -1

View File

@@ -0,0 +1,213 @@
"""Tracking for bluetooth devices."""
from __future__ import annotations
import asyncio
from datetime import datetime, timedelta
import logging
from typing import Final
import bluetooth
from bt_proximity import BluetoothRSSI
import voluptuous as vol
from homeassistant.components.device_tracker import (
CONF_SCAN_INTERVAL,
CONF_TRACK_NEW,
DEFAULT_TRACK_NEW,
PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA,
SCAN_INTERVAL,
SourceType,
)
from homeassistant.components.device_tracker.legacy import (
YAML_DEVICES,
AsyncSeeCallback,
Device,
async_load_config,
)
from homeassistant.const import CONF_DEVICE_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from .const import (
BT_PREFIX,
CONF_REQUEST_RSSI,
DEFAULT_DEVICE_ID,
DOMAIN,
SERVICE_UPDATE,
)
_LOGGER: Final = logging.getLogger(__name__)
PLATFORM_SCHEMA: Final = DEVICE_TRACKER_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_TRACK_NEW): cv.boolean,
vol.Optional(CONF_REQUEST_RSSI): cv.boolean,
vol.Optional(CONF_DEVICE_ID, default=DEFAULT_DEVICE_ID): vol.All(
vol.Coerce(int), vol.Range(min=-1)
),
}
)
def is_bluetooth_device(device: Device) -> bool:
"""Check whether a device is a bluetooth device by its mac."""
return device.mac is not None and device.mac[:3].upper() == BT_PREFIX
def discover_devices(device_id: int) -> list[tuple[str, str]]:
"""Discover Bluetooth devices."""
try:
result = bluetooth.discover_devices(
duration=8,
lookup_names=True,
flush_cache=True,
lookup_class=False,
device_id=device_id,
)
except OSError as ex:
# OSError is generally thrown if a bluetooth device isn't found
_LOGGER.error("Couldn't discover bluetooth devices: %s", ex)
return []
_LOGGER.debug("Bluetooth devices discovered = %d", len(result))
return result # type: ignore[no-any-return]
async def see_device(
hass: HomeAssistant,
async_see: AsyncSeeCallback,
mac: str,
device_name: str,
rssi: tuple[int] | None = None,
) -> None:
"""Mark a device as seen."""
attributes = {}
if rssi is not None:
attributes["rssi"] = rssi
await async_see(
mac=f"{BT_PREFIX}{mac}",
host_name=device_name,
attributes=attributes,
source_type=SourceType.BLUETOOTH,
)
async def get_tracking_devices(hass: HomeAssistant) -> tuple[set[str], set[str]]:
"""Load all known devices.
We just need the devices, so set consider_home and the home range to 0.
"""
yaml_path: str = hass.config.path(YAML_DEVICES)
devices = await async_load_config(yaml_path, hass, timedelta(0))
bluetooth_devices = [device for device in devices if is_bluetooth_device(device)]
devices_to_track: set[str] = {
device.mac[3:]
for device in bluetooth_devices
if device.track and device.mac is not None
}
devices_to_not_track: set[str] = {
device.mac[3:]
for device in bluetooth_devices
if not device.track and device.mac is not None
}
return devices_to_track, devices_to_not_track
def lookup_name(mac: str) -> str | None:
"""Lookup a Bluetooth device name."""
_LOGGER.debug("Scanning %s", mac)
return bluetooth.lookup_name(mac, timeout=5) # type: ignore[no-any-return]
async def async_setup_scanner(
hass: HomeAssistant,
config: ConfigType,
async_see: AsyncSeeCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> bool:
"""Set up the Bluetooth Scanner."""
device_id: int = config[CONF_DEVICE_ID]
interval: timedelta = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
request_rssi: bool = config.get(CONF_REQUEST_RSSI, False)
update_bluetooth_lock = asyncio.Lock()
# If tracking new devices is enabled, discover new devices on startup.
track_new: bool = config.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)
_LOGGER.debug("Tracking new devices is set to %s", track_new)
devices_to_track, devices_to_not_track = await get_tracking_devices(hass)
if not devices_to_track and not track_new:
_LOGGER.debug("No Bluetooth devices to track and not tracking new devices")
if request_rssi:
_LOGGER.debug("Detecting RSSI for devices")
async def perform_bluetooth_update() -> None:
"""Discover Bluetooth devices and update status."""
_LOGGER.debug("Performing Bluetooth devices discovery and update")
tasks: list[asyncio.Task[None]] = []
try:
if track_new:
devices = await hass.async_add_executor_job(discover_devices, device_id)
for mac, _device_name in devices:
if mac not in devices_to_track and mac not in devices_to_not_track:
devices_to_track.add(mac)
for mac in devices_to_track:
friendly_name = await hass.async_add_executor_job(lookup_name, mac)
if friendly_name is None:
# Could not look up the device name
continue
rssi = None
if request_rssi:
client = BluetoothRSSI(mac)
rssi = await hass.async_add_executor_job(client.request_rssi)
client.close()
tasks.append(
asyncio.create_task(
see_device(hass, async_see, mac, friendly_name, rssi)
)
)
if tasks:
await asyncio.wait(tasks)
except bluetooth.BluetoothError:
_LOGGER.exception("Error looking up Bluetooth device")
async def update_bluetooth(now: datetime | None = None) -> None:
"""Lookup Bluetooth devices and update status."""
# If an update is in progress, we don't do anything
if update_bluetooth_lock.locked():
_LOGGER.debug(
(
"Previous execution of update_bluetooth is taking longer than the"
" scheduled update of interval %s"
),
interval,
)
return
async with update_bluetooth_lock:
await perform_bluetooth_update()
async def handle_manual_update_bluetooth(call: ServiceCall) -> None:
"""Update bluetooth devices on demand."""
await update_bluetooth()
hass.async_create_task(update_bluetooth())
async_track_time_interval(hass, update_bluetooth, interval)
hass.services.async_register(DOMAIN, SERVICE_UPDATE, handle_manual_update_bluetooth)
return True
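# Minimal standalone sketch (illustration only, not part of this diff) of the
# non-reentrant update pattern used above: a scheduled run is skipped while the
# previous run still holds the lock.
import asyncio

_update_lock = asyncio.Lock()

async def guarded_update(do_update) -> None:
    """Run do_update unless a previous run is still in progress."""
    if _update_lock.locked():
        return
    async with _update_lock:
        await do_update()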

View File

@@ -0,0 +1,7 @@
{
"services": {
"update": {
"service": "mdi:update"
}
}
}

View File

@@ -0,0 +1,10 @@
{
"domain": "bluetooth_tracker",
"name": "Bluetooth Tracker",
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/bluetooth_tracker",
"iot_class": "local_polling",
"loggers": ["bluetooth", "bt_proximity"],
"quality_scale": "legacy",
"requirements": ["bt-proximity==0.2.1", "PyBluez==0.22"]
}

View File

@@ -0,0 +1 @@
update:

View File

@@ -0,0 +1,8 @@
{
"services": {
"update": {
"description": "Triggers manual tracker update.",
"name": "Update"
}
}
}

View File

@@ -96,16 +96,5 @@
"turn_on": {
"service": "mdi:power-on"
}
},
"triggers": {
"started_heating": {
"trigger": "mdi:fire"
},
"turned_off": {
"trigger": "mdi:power-off"
},
"turned_on": {
"trigger": "mdi:power-on"
}
}
}

View File

@@ -1,8 +1,4 @@
{
"common": {
"trigger_behavior_description": "The behavior of the targeted climates to trigger on.",
"trigger_behavior_name": "Behavior"
},
"device_automation": {
"action_type": {
"set_hvac_mode": "Change HVAC mode on {entity_name}",
@@ -191,13 +187,6 @@
"heat_cool": "Heat/cool",
"off": "[%key:common::state::off%]"
}
},
"trigger_behavior": {
"options": {
"any": "Any",
"first": "First",
"last": "Last"
}
}
},
"services": {
@@ -296,40 +285,5 @@
"name": "[%key:common::action::turn_on%]"
}
},
"title": "Climate",
"triggers": {
"started_heating": {
"description": "Triggers when a climate starts to heat.",
"description_configured": "[%key:component::climate::triggers::started_heating::description%]",
"fields": {
"behavior": {
"description": "[%key:component::climate::common::trigger_behavior_description%]",
"name": "[%key:component::climate::common::trigger_behavior_name%]"
}
},
"name": "When a climate starts to heat"
},
"turned_off": {
"description": "Triggers when a climate is turned off.",
"description_configured": "[%key:component::climate::triggers::turned_off::description%]",
"fields": {
"behavior": {
"description": "[%key:component::climate::common::trigger_behavior_description%]",
"name": "[%key:component::climate::common::trigger_behavior_name%]"
}
},
"name": "When a climate is turned off"
},
"turned_on": {
"description": "Triggers when a climate is turned on.",
"description_configured": "[%key:component::climate::triggers::turned_on::description%]",
"fields": {
"behavior": {
"description": "[%key:component::climate::common::trigger_behavior_description%]",
"name": "[%key:component::climate::common::trigger_behavior_name%]"
}
},
"name": "When a climate is turned on"
}
}
"title": "Climate"
}

View File

@@ -1,37 +0,0 @@
"""Provides triggers for climates."""
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import (
Trigger,
make_conditional_entity_state_trigger,
make_entity_state_attribute_trigger,
make_entity_state_trigger,
)
from .const import ATTR_HVAC_ACTION, DOMAIN, HVACAction, HVACMode
TRIGGERS: dict[str, type[Trigger]] = {
"turned_off": make_entity_state_trigger(DOMAIN, HVACMode.OFF),
"turned_on": make_conditional_entity_state_trigger(
DOMAIN,
from_states={
HVACMode.OFF,
},
to_states={
HVACMode.AUTO,
HVACMode.COOL,
HVACMode.DRY,
HVACMode.FAN_ONLY,
HVACMode.HEAT,
HVACMode.HEAT_COOL,
},
),
"started_heating": make_entity_state_attribute_trigger(
DOMAIN, ATTR_HVAC_ACTION, HVACAction.HEATING
),
}
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for climates."""
return TRIGGERS

View File

@@ -1,19 +0,0 @@
.trigger_common: &trigger_common
target:
entity:
domain: climate
fields:
behavior:
required: true
default: any
selector:
select:
translation_key: trigger_behavior
options:
- first
- last
- any
started_heating: *trigger_common
turned_off: *trigger_common
turned_on: *trigger_common

View File

@@ -77,13 +77,7 @@ from .subscription import async_subscription_info
DEFAULT_MODE = MODE_PROD
PLATFORMS = [
Platform.AI_TASK,
Platform.BINARY_SENSOR,
Platform.CONVERSATION,
Platform.STT,
Platform.TTS,
]
PLATFORMS = [Platform.BINARY_SENSOR, Platform.STT, Platform.TTS]
SERVICE_REMOTE_CONNECT = "remote_connect"
SERVICE_REMOTE_DISCONNECT = "remote_disconnect"

View File

@@ -1,200 +0,0 @@
"""AI Task integration for Home Assistant Cloud."""
from __future__ import annotations
import io
from json import JSONDecodeError
import logging
from hass_nabucasa.llm import (
LLMAuthenticationError,
LLMError,
LLMImageAttachment,
LLMRateLimitError,
LLMResponseError,
LLMServiceError,
)
from PIL import Image
from homeassistant.components import ai_task, conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.json import json_loads
from .const import AI_TASK_ENTITY_UNIQUE_ID, DATA_CLOUD
from .entity import BaseCloudLLMEntity
_LOGGER = logging.getLogger(__name__)
def _convert_image_for_editing(data: bytes) -> tuple[bytes, str]:
"""Ensure the image data is in a format accepted by OpenAI image edits."""
stream = io.BytesIO(data)
with Image.open(stream) as img:
mode = img.mode
if mode not in ("RGBA", "LA", "L"):
img = img.convert("RGBA")
output = io.BytesIO()
if img.mode in ("RGBA", "LA", "L"):
img.save(output, format="PNG")
return output.getvalue(), "image/png"
img.save(output, format=img.format or "PNG")
return output.getvalue(), f"image/{(img.format or 'png').lower()}"
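# Worked example (illustration, not part of this diff): an RGB JPEG attachment is
# converted to RGBA and re-encoded, so the helper returns (png_bytes, "image/png");
# images already in "RGBA", "LA" or "L" mode are likewise written out as PNG.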
async def async_prepare_image_generation_attachments(
hass: HomeAssistant, attachments: list[conversation.Attachment]
) -> list[LLMImageAttachment]:
"""Load attachment data for image generation."""
def prepare() -> list[LLMImageAttachment]:
items: list[LLMImageAttachment] = []
for attachment in attachments:
if not attachment.mime_type or not attachment.mime_type.startswith(
"image/"
):
raise HomeAssistantError(
"Only image attachments are supported for image generation"
)
path = attachment.path
if not path.exists():
raise HomeAssistantError(f"`{path}` does not exist")
data = path.read_bytes()
mime_type = attachment.mime_type
try:
data, mime_type = _convert_image_for_editing(data)
except HomeAssistantError:
raise
except Exception as err:
raise HomeAssistantError("Failed to process image attachment") from err
items.append(
LLMImageAttachment(
filename=path.name,
mime_type=mime_type,
data=data,
)
)
return items
return await hass.async_add_executor_job(prepare)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Home Assistant Cloud AI Task entity."""
cloud = hass.data[DATA_CLOUD]
try:
await cloud.llm.async_ensure_token()
except LLMError:
return
async_add_entities([CloudLLMTaskEntity(cloud, config_entry)])
class CloudLLMTaskEntity(ai_task.AITaskEntity, BaseCloudLLMEntity):
"""Home Assistant Cloud AI Task entity."""
_attr_has_entity_name = True
_attr_supported_features = (
ai_task.AITaskEntityFeature.GENERATE_DATA
| ai_task.AITaskEntityFeature.GENERATE_IMAGE
| ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
)
_attr_translation_key = "cloud_ai"
_attr_unique_id = AI_TASK_ENTITY_UNIQUE_ID
@property
def available(self) -> bool:
"""Return if the entity is available."""
return self._cloud.is_logged_in and self._cloud.valid_subscription
async def _async_generate_data(
self,
task: ai_task.GenDataTask,
chat_log: conversation.ChatLog,
) -> ai_task.GenDataTaskResult:
"""Handle a generate data task."""
await self._async_handle_chat_log(
"ai_task", chat_log, task.name, task.structure
)
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
raise HomeAssistantError(
"Last content in chat log is not an AssistantContent"
)
text = chat_log.content[-1].content or ""
if not task.structure:
return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=text,
)
try:
data = json_loads(text)
except JSONDecodeError as err:
_LOGGER.error(
"Failed to parse JSON response: %s. Response: %s",
err,
text,
)
raise HomeAssistantError("Error with OpenAI structured response") from err
return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=data,
)
async def _async_generate_image(
self,
task: ai_task.GenImageTask,
chat_log: conversation.ChatLog,
) -> ai_task.GenImageTaskResult:
"""Handle a generate image task."""
attachments: list[LLMImageAttachment] | None = None
if task.attachments:
attachments = await async_prepare_image_generation_attachments(
self.hass, task.attachments
)
try:
if attachments is None:
image = await self._cloud.llm.async_generate_image(
prompt=task.instructions,
)
else:
image = await self._cloud.llm.async_edit_image(
prompt=task.instructions,
attachments=attachments,
)
except LLMAuthenticationError as err:
raise ConfigEntryAuthFailed("Cloud LLM authentication failed") from err
except LLMRateLimitError as err:
raise HomeAssistantError("Cloud LLM is rate limited") from err
except LLMResponseError as err:
raise HomeAssistantError(str(err)) from err
except LLMServiceError as err:
raise HomeAssistantError("Error talking to Cloud LLM") from err
except LLMError as err:
raise HomeAssistantError(str(err)) from err
return ai_task.GenImageTaskResult(
conversation_id=chat_log.conversation_id,
mime_type=image["mime_type"],
image_data=image["image_data"],
model=image.get("model"),
width=image.get("width"),
height=image.get("height"),
revised_prompt=image.get("revised_prompt"),
)

View File

@@ -91,8 +91,6 @@ DISPATCHER_REMOTE_UPDATE: SignalType[Any] = SignalType("cloud_remote_update")
STT_ENTITY_UNIQUE_ID = "cloud-speech-to-text"
TTS_ENTITY_UNIQUE_ID = "cloud-text-to-speech"
AI_TASK_ENTITY_UNIQUE_ID = "cloud-ai-task"
CONVERSATION_ENTITY_UNIQUE_ID = "cloud-conversation-agent"
LOGIN_MFA_TIMEOUT = 60

View File

@@ -1,75 +0,0 @@
"""Conversation support for Home Assistant Cloud."""
from __future__ import annotations
from typing import Literal
from hass_nabucasa.llm import LLMError
from homeassistant.components import conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import MATCH_ALL
from homeassistant.core import HomeAssistant
from homeassistant.helpers import llm
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import CONVERSATION_ENTITY_UNIQUE_ID, DATA_CLOUD, DOMAIN
from .entity import BaseCloudLLMEntity
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Home Assistant Cloud conversation entity."""
cloud = hass.data[DATA_CLOUD]
try:
await cloud.llm.async_ensure_token()
except LLMError:
return
async_add_entities([CloudConversationEntity(cloud, config_entry)])
class CloudConversationEntity(
conversation.ConversationEntity,
BaseCloudLLMEntity,
):
"""Home Assistant Cloud conversation agent."""
_attr_has_entity_name = True
_attr_name = "Home Assistant Cloud"
_attr_translation_key = "cloud_conversation"
_attr_unique_id = CONVERSATION_ENTITY_UNIQUE_ID
_attr_supported_features = conversation.ConversationEntityFeature.CONTROL
@property
def available(self) -> bool:
"""Return if the entity is available."""
return self._cloud.is_logged_in and self._cloud.valid_subscription
@property
def supported_languages(self) -> list[str] | Literal["*"]:
"""Return a list of supported languages."""
return MATCH_ALL
async def _async_handle_message(
self,
user_input: conversation.ConversationInput,
chat_log: conversation.ChatLog,
) -> conversation.ConversationResult:
"""Process a user input."""
try:
await chat_log.async_provide_llm_data(
user_input.as_llm_context(DOMAIN),
llm.LLM_API_ASSIST,
None,
user_input.extra_system_prompt,
)
except conversation.ConverseError as err:
return err.as_conversation_result()
await self._async_handle_chat_log("conversation", chat_log)
return conversation.async_get_result_from_chat_log(user_input, chat_log)

View File

@@ -1,615 +0,0 @@
"""Helpers for cloud LLM chat handling."""
import base64
from collections.abc import AsyncGenerator, Callable, Iterable
from enum import Enum
import json
import logging
import re
from typing import Any, Literal, cast
from hass_nabucasa import Cloud
from hass_nabucasa.llm import (
LLMAuthenticationError,
LLMError,
LLMRateLimitError,
LLMResponseError,
LLMServiceError,
)
from litellm import (
ResponseFunctionToolCall,
ResponseInputParam,
ResponsesAPIStreamEvents,
)
from openai.types.responses import (
FunctionToolParam,
ResponseInputItemParam,
ResponseReasoningItem,
ToolParam,
WebSearchToolParam,
)
from openai.types.responses.response_input_param import (
ImageGenerationCall as ImageGenerationCallParam,
)
from openai.types.responses.response_output_item import ImageGenerationCall
import voluptuous as vol
from voluptuous_openapi import convert
from homeassistant.components import conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
from homeassistant.helpers import llm
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify
from .client import CloudClient
_LOGGER = logging.getLogger(__name__)
_MAX_TOOL_ITERATIONS = 10
class ResponseItemType(str, Enum):
"""Response item types."""
FUNCTION_CALL = "function_call"
MESSAGE = "message"
REASONING = "reasoning"
WEB_SEARCH_CALL = "web_search_call"
IMAGE = "image"
def _convert_content_to_param(
chat_content: Iterable[conversation.Content],
) -> ResponseInputParam:
"""Convert any native chat message for this agent to the native format."""
messages: ResponseInputParam = []
reasoning_summary: list[str] = []
web_search_calls: dict[str, dict[str, Any]] = {}
for content in chat_content:
if isinstance(content, conversation.ToolResultContent):
if (
content.tool_name == "web_search_call"
and content.tool_call_id in web_search_calls
):
web_search_call = web_search_calls.pop(content.tool_call_id)
web_search_call["status"] = content.tool_result.get(
"status", "completed"
)
messages.append(cast("ResponseInputItemParam", web_search_call))
else:
messages.append(
{
"type": "function_call_output",
"call_id": content.tool_call_id,
"output": json.dumps(content.tool_result),
}
)
continue
if content.content:
role: Literal["user", "assistant", "system", "developer"] = content.role
if role == "system":
role = "developer"
messages.append(
{"type": "message", "role": role, "content": content.content}
)
if isinstance(content, conversation.AssistantContent):
if content.tool_calls:
for tool_call in content.tool_calls:
if (
tool_call.external
and tool_call.tool_name == "web_search_call"
and "action" in tool_call.tool_args
):
web_search_calls[tool_call.id] = {
"type": "web_search_call",
"id": tool_call.id,
"action": tool_call.tool_args["action"],
"status": "completed",
}
else:
messages.append(
{
"type": "function_call",
"name": tool_call.tool_name,
"arguments": json.dumps(tool_call.tool_args),
"call_id": tool_call.id,
}
)
if content.thinking_content:
reasoning_summary.append(content.thinking_content)
if isinstance(content.native, ResponseReasoningItem):
messages.append(
{
"type": "reasoning",
"id": content.native.id,
"summary": (
[
{
"type": "summary_text",
"text": summary,
}
for summary in reasoning_summary
]
if content.thinking_content
else []
),
"encrypted_content": content.native.encrypted_content,
}
)
reasoning_summary = []
elif isinstance(content.native, ImageGenerationCall):
messages.append(
cast(ImageGenerationCallParam, content.native.to_dict())
)
return messages
def _format_tool(
tool: llm.Tool,
custom_serializer: Callable[[Any], Any] | None,
) -> ToolParam:
"""Format a Home Assistant tool for the OpenAI Responses API."""
parameters = convert(tool.parameters, custom_serializer=custom_serializer)
spec: FunctionToolParam = {
"type": "function",
"name": tool.name,
"strict": False,
"description": tool.description,
"parameters": parameters,
}
return spec
def _adjust_schema(schema: dict[str, Any]) -> None:
"""Adjust the schema to be compatible with OpenAI API."""
if schema["type"] == "object":
schema.setdefault("strict", True)
schema.setdefault("additionalProperties", False)
if "properties" not in schema:
return
if "required" not in schema:
schema["required"] = []
# Ensure all properties are required
for prop, prop_info in schema["properties"].items():
_adjust_schema(prop_info)
if prop not in schema["required"]:
prop_info["type"] = [prop_info["type"], "null"]
schema["required"].append(prop)
elif schema["type"] == "array":
if "items" not in schema:
return
_adjust_schema(schema["items"])
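# Worked example (illustration, not part of this diff) of the adjustment above:
example_schema = {
    "type": "object",
    "properties": {"name": {"type": "string"}},
}
_adjust_schema(example_schema)
# example_schema now has strict=True, additionalProperties=False,
# required == ["name"], and properties["name"]["type"] == ["string", "null"].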
def _format_structured_output(
schema: vol.Schema, llm_api: llm.APIInstance | None
) -> dict[str, Any]:
"""Format the schema to be compatible with OpenAI API."""
result: dict[str, Any] = convert(
schema,
custom_serializer=(
llm_api.custom_serializer if llm_api else llm.selector_serializer
),
)
_ensure_schema_constraints(result)
return result
def _ensure_schema_constraints(schema: dict[str, Any]) -> None:
"""Ensure generated schemas match the Responses API expectations."""
schema_type = schema.get("type")
if schema_type == "object":
schema.setdefault("additionalProperties", False)
properties = schema.get("properties")
if isinstance(properties, dict):
for property_schema in properties.values():
if isinstance(property_schema, dict):
_ensure_schema_constraints(property_schema)
elif schema_type == "array":
items = schema.get("items")
if isinstance(items, dict):
_ensure_schema_constraints(items)
# Borrowed and adapted from openai_conversation component
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
chat_log: conversation.ChatLog,
stream: Any,
remove_citations: bool = False,
) -> AsyncGenerator[
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
]:
"""Transform stream result into HA format."""
last_summary_index = None
last_role: Literal["assistant", "tool_result"] | None = None
current_tool_call: ResponseFunctionToolCall | None = None
# Non-reasoning models don't follow our request to remove citations, so we remove
# them manually here. They always follow the same pattern: the citation is always
# in parentheses in Markdown format, the citation is always in a single delta event,
# and sometimes the closing parenthesis is split into a separate delta event.
remove_parentheses: bool = False
citation_regexp = re.compile(r"\(\[([^\]]+)\]\((https?:\/\/[^\)]+)\)")
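# Example (illustration, not part of this diff): for a delta of
#   "Sunny today ([weather.com](https://weather.com/today))"
# the pattern matches "([weather.com](https://weather.com/today)", the leading space
# is trimmed, and the trailing ")" is removed below, leaving just "Sunny today".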
async for event in stream:
event_type = getattr(event, "type", None)
event_item = getattr(event, "item", None)
event_item_type = getattr(event_item, "type", None) if event_item else None
_LOGGER.debug(
"Event[%s] | item: %s",
event_type,
event_item_type,
)
if event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_ADDED:
# Detect function_call even when it's a BaseLiteLLMOpenAIResponseObject
if event_item_type == ResponseItemType.FUNCTION_CALL:
# OpenAI has tool calls as individual events
# while HA puts tool calls inside the assistant message.
# We turn them into individual assistant content for HA
# to ensure that tools are called as soon as possible.
yield {"role": "assistant"}
last_role = "assistant"
last_summary_index = None
current_tool_call = cast(ResponseFunctionToolCall, event.item)
elif (
event_item_type == ResponseItemType.MESSAGE
or (
event_item_type == ResponseItemType.REASONING
and last_summary_index is not None
) # Subsequent ResponseReasoningItem
or last_role != "assistant"
):
yield {"role": "assistant"}
last_role = "assistant"
last_summary_index = None
elif event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_DONE:
if event_item_type == ResponseItemType.REASONING:
encrypted_content = getattr(event.item, "encrypted_content", None)
summary = getattr(event.item, "summary", []) or []
yield {
"native": ResponseReasoningItem(
type="reasoning",
id=event.item.id,
summary=[],
encrypted_content=encrypted_content,
)
}
last_summary_index = len(summary) - 1 if summary else None
elif event_item_type == ResponseItemType.WEB_SEARCH_CALL:
action = getattr(event.item, "action", None)
if isinstance(action, dict):
action_dict = action
elif action is not None:
action_dict = action.to_dict()
else:
action_dict = {}
yield {
"tool_calls": [
llm.ToolInput(
id=event.item.id,
tool_name="web_search_call",
tool_args={"action": action_dict},
external=True,
)
]
}
yield {
"role": "tool_result",
"tool_call_id": event.item.id,
"tool_name": "web_search_call",
"tool_result": {"status": event.item.status},
}
last_role = "tool_result"
elif event_item_type == ResponseItemType.IMAGE:
yield {"native": event.item}
last_summary_index = -1 # Trigger new assistant message on next turn
elif event_type == ResponsesAPIStreamEvents.OUTPUT_TEXT_DELTA:
data = event.delta
if remove_parentheses:
data = data.removeprefix(")")
remove_parentheses = False
elif remove_citations and (match := citation_regexp.search(data)):
match_start, match_end = match.span()
# remove leading space if any
if data[match_start - 1 : match_start] == " ":
match_start -= 1
# remove closing parenthesis:
if data[match_end : match_end + 1] == ")":
match_end += 1
else:
remove_parentheses = True
data = data[:match_start] + data[match_end:]
if data:
yield {"content": data}
elif event_type == ResponsesAPIStreamEvents.REASONING_SUMMARY_TEXT_DELTA:
# OpenAI can output several reasoning summaries
# in a single ResponseReasoningItem. We split them into separate
# AssistantContent messages. Only the last of them will have
# the reasoning `native` field set.
if (
last_summary_index is not None
and event.summary_index != last_summary_index
):
yield {"role": "assistant"}
last_role = "assistant"
last_summary_index = event.summary_index
yield {"thinking_content": event.delta}
elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DELTA:
if current_tool_call is not None:
current_tool_call.arguments += event.delta
elif event_type == ResponsesAPIStreamEvents.WEB_SEARCH_CALL_SEARCHING:
yield {"role": "assistant"}
elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DONE:
if current_tool_call is not None:
current_tool_call.status = "completed"
raw_args = json.loads(current_tool_call.arguments)
for key in ("area", "floor"):
if key in raw_args and not raw_args[key]:
# Remove keys that are "" or None
raw_args.pop(key, None)
yield {
"tool_calls": [
llm.ToolInput(
id=current_tool_call.call_id,
tool_name=current_tool_call.name,
tool_args=raw_args,
)
]
}
elif event_type == ResponsesAPIStreamEvents.RESPONSE_COMPLETED:
if event.response.usage is not None:
chat_log.async_trace(
{
"stats": {
"input_tokens": event.response.usage.input_tokens,
"output_tokens": event.response.usage.output_tokens,
}
}
)
elif event_type == ResponsesAPIStreamEvents.RESPONSE_INCOMPLETE:
if event.response.usage is not None:
chat_log.async_trace(
{
"stats": {
"input_tokens": event.response.usage.input_tokens,
"output_tokens": event.response.usage.output_tokens,
}
}
)
if (
event.response.incomplete_details
and event.response.incomplete_details.reason
):
reason: str = event.response.incomplete_details.reason
else:
reason = "unknown reason"
if reason == "max_output_tokens":
reason = "max output tokens reached"
elif reason == "content_filter":
reason = "content filter triggered"
raise HomeAssistantError(f"OpenAI response incomplete: {reason}")
elif event_type == ResponsesAPIStreamEvents.RESPONSE_FAILED:
if event.response.usage is not None:
chat_log.async_trace(
{
"stats": {
"input_tokens": event.response.usage.input_tokens,
"output_tokens": event.response.usage.output_tokens,
}
}
)
reason = "unknown reason"
if event.response.error is not None:
reason = event.response.error.message
raise HomeAssistantError(f"OpenAI response failed: {reason}")
elif event_type == ResponsesAPIStreamEvents.ERROR:
raise HomeAssistantError(f"OpenAI response error: {event.message}")
class BaseCloudLLMEntity(Entity):
"""Cloud LLM conversation agent."""
def __init__(self, cloud: Cloud[CloudClient], config_entry: ConfigEntry) -> None:
"""Initialize the entity."""
self._cloud = cloud
self._entry = config_entry
async def _prepare_chat_for_generation(
self,
chat_log: conversation.ChatLog,
messages: ResponseInputParam,
response_format: dict[str, Any] | None = None,
) -> dict[str, Any]:
"""Prepare kwargs for Cloud LLM from the chat log."""
last_content: Any = chat_log.content[-1]
if last_content.role == "user" and last_content.attachments:
files = await self._async_prepare_files_for_prompt(last_content.attachments)
current_content = last_content.content
last_content = [*(current_content or []), *files]
tools: list[ToolParam] = []
tool_choice: str | None = None
if chat_log.llm_api:
ha_tools: list[ToolParam] = [
_format_tool(tool, chat_log.llm_api.custom_serializer)
for tool in chat_log.llm_api.tools
]
if ha_tools:
if not chat_log.unresponded_tool_results:
tools = ha_tools
tool_choice = "auto"
else:
tools = []
tool_choice = "none"
web_search = WebSearchToolParam(
type="web_search",
search_context_size="medium",
)
tools.append(web_search)
response_kwargs: dict[str, Any] = {
"messages": messages,
"conversation_id": chat_log.conversation_id,
}
if response_format is not None:
response_kwargs["response_format"] = response_format
if tools is not None:
response_kwargs["tools"] = tools
if tool_choice is not None:
response_kwargs["tool_choice"] = tool_choice
response_kwargs["stream"] = True
return response_kwargs
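Purely as an illustration (values invented), the kwargs handed to the cloud client end up shaped like this when tools are offered:
# {
#     "messages": messages,            # ResponseInputParam built from the chat log
#     "conversation_id": "chat-1234",  # hypothetical id
#     "tools": [*ha_tools, web_search],
#     "tool_choice": "auto",
#     "stream": True,
# }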
async def _async_prepare_files_for_prompt(
self,
attachments: list[conversation.Attachment],
) -> list[dict[str, Any]]:
"""Prepare files for multimodal prompts."""
def prepare() -> list[dict[str, Any]]:
content: list[dict[str, Any]] = []
for attachment in attachments:
mime_type = attachment.mime_type
path = attachment.path
if not path.exists():
raise HomeAssistantError(f"`{path}` does not exist")
data = base64.b64encode(path.read_bytes()).decode("utf-8")
if mime_type and mime_type.startswith("image/"):
content.append(
{
"type": "input_image",
"image_url": f"data:{mime_type};base64,{data}",
"detail": "auto",
}
)
elif mime_type and mime_type.startswith("application/pdf"):
content.append(
{
"type": "input_file",
"filename": str(path.name),
"file_data": f"data:{mime_type};base64,{data}",
}
)
else:
raise HomeAssistantError(
"Only images and PDF are currently supported as attachments"
)
return content
return await self.hass.async_add_executor_job(prepare)
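A hypothetical result for a single JPEG attachment (encoded payload shortened) would look like:
# [{"type": "input_image", "image_url": "data:image/jpeg;base64,<encoded>", "detail": "auto"}]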
async def _async_handle_chat_log(
self,
type: Literal["ai_task", "conversation"],
chat_log: conversation.ChatLog,
structure_name: str | None = None,
structure: vol.Schema | None = None,
) -> None:
"""Generate a response for the chat log."""
for _ in range(_MAX_TOOL_ITERATIONS):
response_format: dict[str, Any] | None = None
if structure and structure_name:
response_format = {
"type": "json_schema",
"json_schema": {
"name": slugify(structure_name),
"schema": _format_structured_output(
structure, chat_log.llm_api
),
"strict": True,
},
}
messages = _convert_content_to_param(chat_log.content)
response_kwargs = await self._prepare_chat_for_generation(
chat_log,
messages,
response_format,
)
try:
if type == "conversation":
raw_stream = await self._cloud.llm.async_process_conversation(
**response_kwargs,
)
else:
raw_stream = await self._cloud.llm.async_generate_data(
**response_kwargs,
)
messages.extend(
_convert_content_to_param(
[
content
async for content in chat_log.async_add_delta_content_stream(
self.entity_id,
_transform_stream(
chat_log,
raw_stream,
True,
),
)
]
)
)
except LLMAuthenticationError as err:
raise ConfigEntryAuthFailed("Cloud LLM authentication failed") from err
except LLMRateLimitError as err:
raise HomeAssistantError("Cloud LLM is rate limited") from err
except LLMResponseError as err:
raise HomeAssistantError(str(err)) from err
except LLMServiceError as err:
raise HomeAssistantError("Error talking to Cloud LLM") from err
except LLMError as err:
raise HomeAssistantError(str(err)) from err
if not chat_log.unresponded_tool_results:
break
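For reference, a sketch (structure name invented) of the response_format built above for structured output:
# {
#     "type": "json_schema",
#     "json_schema": {
#         "name": "shopping_list",
#         "schema": {"type": "object", "additionalProperties": False},
#         "strict": True,
#     },
# }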

View File

@@ -1,7 +1,5 @@
"""Helpers for the cloud component."""
from __future__ import annotations
from collections import deque
import logging

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.6.1"],
"requirements": ["hass-nabucasa==1.5.1"],
"single_config_entry": true
}

View File

@@ -1,11 +1,4 @@
{
"entity": {
"ai_task": {
"cloud_ai": {
"name": "Home Assistant Cloud AI"
}
}
},
"exceptions": {
"backup_size_too_large": {
"message": "The backup size of {size}GB is too large to be uploaded to Home Assistant Cloud."

View File

@@ -7,7 +7,6 @@ from collections.abc import AsyncGenerator, AsyncIterable, Callable, Generator
from contextlib import contextmanager
from contextvars import ContextVar
from dataclasses import asdict, dataclass, field, replace
from datetime import datetime
import logging
from pathlib import Path
from typing import Any, Literal, TypedDict, cast
@@ -17,18 +16,14 @@ import voluptuous as vol
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers import chat_session, frame, intent, llm, template
from homeassistant.util.dt import utcnow
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.json import JsonObjectType
from . import trace
from .const import ChatLogEventType
from .models import ConversationInput, ConversationResult
DATA_CHAT_LOGS: HassKey[dict[str, ChatLog]] = HassKey("conversation_chat_logs")
DATA_SUBSCRIPTIONS: HassKey[
list[Callable[[str, ChatLogEventType, dict[str, Any]], None]]
] = HassKey("conversation_chat_log_subscriptions")
LOGGER = logging.getLogger(__name__)
current_chat_log: ContextVar[ChatLog | None] = ContextVar(
@@ -36,40 +31,6 @@ current_chat_log: ContextVar[ChatLog | None] = ContextVar(
)
@callback
def async_subscribe_chat_logs(
hass: HomeAssistant,
callback_func: Callable[[str, ChatLogEventType, dict[str, Any]], None],
) -> Callable[[], None]:
"""Subscribe to all chat logs."""
subscriptions = hass.data.get(DATA_SUBSCRIPTIONS)
if subscriptions is None:
subscriptions = []
hass.data[DATA_SUBSCRIPTIONS] = subscriptions
subscriptions.append(callback_func)
@callback
def unsubscribe() -> None:
"""Unsubscribe from chat logs."""
subscriptions.remove(callback_func)
return unsubscribe
@callback
def _async_notify_subscribers(
hass: HomeAssistant,
conversation_id: str,
event_type: ChatLogEventType,
data: dict[str, Any],
) -> None:
"""Notify subscribers of a chat log event."""
if subscriptions := hass.data.get(DATA_SUBSCRIPTIONS):
for callback_func in subscriptions:
callback_func(conversation_id, event_type, data)
@contextmanager
def async_get_chat_log(
hass: HomeAssistant,
@@ -102,8 +63,6 @@ def async_get_chat_log(
all_chat_logs = {}
hass.data[DATA_CHAT_LOGS] = all_chat_logs
is_new_log = session.conversation_id not in all_chat_logs
if chat_log := all_chat_logs.get(session.conversation_id):
chat_log = replace(chat_log, content=chat_log.content.copy())
else:
@@ -112,15 +71,6 @@ def async_get_chat_log(
if chat_log_delta_listener:
chat_log.delta_listener = chat_log_delta_listener
# Fire CREATED event for new chat logs before any content is added
if is_new_log:
_async_notify_subscribers(
hass,
session.conversation_id,
ChatLogEventType.CREATED,
{"chat_log": chat_log.as_dict()},
)
if user_input is not None:
chat_log.async_add_user_content(UserContent(content=user_input.text))
@@ -134,28 +84,14 @@ def async_get_chat_log(
LOGGER.debug(
"Chat Log opened but no assistant message was added, ignoring update"
)
# If this was a new log but nothing was added, fire DELETED to clean up
if is_new_log:
_async_notify_subscribers(
hass,
session.conversation_id,
ChatLogEventType.DELETED,
{},
)
return
if is_new_log:
if session.conversation_id not in all_chat_logs:
@callback
def do_cleanup() -> None:
"""Handle cleanup."""
all_chat_logs.pop(session.conversation_id)
_async_notify_subscribers(
hass,
session.conversation_id,
ChatLogEventType.DELETED,
{},
)
session.async_on_cleanup(do_cleanup)
@@ -164,16 +100,6 @@ def async_get_chat_log(
all_chat_logs[session.conversation_id] = chat_log
# For new logs, CREATED was already fired before content was added
# For existing logs, fire UPDATED
if not is_new_log:
_async_notify_subscribers(
hass,
session.conversation_id,
ChatLogEventType.UPDATED,
{"chat_log": chat_log.as_dict()},
)
class ConverseError(HomeAssistantError):
"""Error during initialization of conversation.
@@ -203,15 +129,6 @@ class SystemContent:
role: Literal["system"] = field(init=False, default="system")
content: str
created: datetime = field(init=False, default_factory=utcnow)
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the content."""
return {
"role": self.role,
"content": self.content,
"created": self.created,
}
@dataclass(frozen=True)
@@ -221,20 +138,6 @@ class UserContent:
role: Literal["user"] = field(init=False, default="user")
content: str
attachments: list[Attachment] | None = field(default=None)
created: datetime = field(init=False, default_factory=utcnow)
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the content."""
result: dict[str, Any] = {
"role": self.role,
"content": self.content,
"created": self.created,
}
if self.attachments:
result["attachments"] = [
attachment.as_dict() for attachment in self.attachments
]
return result
@dataclass(frozen=True)
@@ -250,14 +153,6 @@ class Attachment:
path: Path
"""Path to the attachment on disk."""
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the attachment."""
return {
"media_content_id": self.media_content_id,
"mime_type": self.mime_type,
"path": str(self.path),
}
@dataclass(frozen=True)
class AssistantContent:
@@ -269,22 +164,6 @@ class AssistantContent:
thinking_content: str | None = None
tool_calls: list[llm.ToolInput] | None = None
native: Any = None
created: datetime = field(init=False, default_factory=utcnow)
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the content."""
result: dict[str, Any] = {
"role": self.role,
"agent_id": self.agent_id,
"created": self.created,
}
if self.content:
result["content"] = self.content
if self.thinking_content:
result["thinking_content"] = self.thinking_content
if self.tool_calls:
result["tool_calls"] = self.tool_calls
return result
@dataclass(frozen=True)
@@ -296,18 +175,6 @@ class ToolResultContent:
tool_call_id: str
tool_name: str
tool_result: JsonObjectType
created: datetime = field(init=False, default_factory=utcnow)
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the content."""
return {
"role": self.role,
"agent_id": self.agent_id,
"tool_call_id": self.tool_call_id,
"tool_name": self.tool_name,
"tool_result": self.tool_result,
"created": self.created,
}
type Content = SystemContent | UserContent | AssistantContent | ToolResultContent
@@ -343,16 +210,6 @@ class ChatLog:
llm_api: llm.APIInstance | None = None
delta_listener: Callable[[ChatLog, dict], None] | None = None
llm_input_provided_index = 0
created: datetime = field(init=False, default_factory=utcnow)
def as_dict(self) -> dict[str, Any]:
"""Return a dictionary representation of the chat log."""
return {
"conversation_id": self.conversation_id,
"continue_conversation": self.continue_conversation,
"content": [c.as_dict() for c in self.content],
"created": self.created,
}
@property
def continue_conversation(self) -> bool:
@@ -384,12 +241,6 @@ class ChatLog:
"""Add user content to the log."""
LOGGER.debug("Adding user content: %s", content)
self.content.append(content)
_async_notify_subscribers(
self.hass,
self.conversation_id,
ChatLogEventType.CONTENT_ADDED,
{"content": content.as_dict()},
)
@callback
def async_add_assistant_content_without_tools(
@@ -408,12 +259,6 @@ class ChatLog:
):
raise ValueError("Non-external tool calls not allowed")
self.content.append(content)
_async_notify_subscribers(
self.hass,
self.conversation_id,
ChatLogEventType.CONTENT_ADDED,
{"content": content.as_dict()},
)
async def async_add_assistant_content(
self,
@@ -472,14 +317,6 @@ class ChatLog:
tool_result=tool_result,
)
self.content.append(response_content)
_async_notify_subscribers(
self.hass,
self.conversation_id,
ChatLogEventType.CONTENT_ADDED,
{
"content": response_content.as_dict(),
},
)
yield response_content
async def async_add_delta_content_stream(
@@ -756,12 +593,6 @@ class ChatLog:
self.llm_api = llm_api
self.extra_system_prompt = extra_system_prompt
self.content[0] = SystemContent(content=prompt)
_async_notify_subscribers(
self.hass,
self.conversation_id,
ChatLogEventType.UPDATED,
{"chat_log": self.as_dict()},
)
LOGGER.debug("Prompt: %s", self.content)
LOGGER.debug("Tools: %s", self.llm_api.tools if self.llm_api else None)

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
from enum import IntFlag, StrEnum
from enum import IntFlag
from typing import TYPE_CHECKING
from homeassistant.util.hass_dict import HassKey
@@ -34,13 +34,3 @@ class ConversationEntityFeature(IntFlag):
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"
class ChatLogEventType(StrEnum):
"""Chat log event type."""
INITIAL_STATE = "initial_state"
CREATED = "created"
UPDATED = "updated"
DELETED = "deleted"
CONTENT_ADDED = "content_added"

View File

@@ -12,7 +12,6 @@ from homeassistant.components import http, websocket_api
from homeassistant.components.http.data_validator import RequestDataValidator
from homeassistant.const import MATCH_ALL
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.chat_session import async_get_chat_session
from homeassistant.util import language as language_util
from .agent_manager import (
@@ -21,8 +20,7 @@ from .agent_manager import (
async_get_agent,
get_agent_manager,
)
from .chat_log import DATA_CHAT_LOGS, async_get_chat_log, async_subscribe_chat_logs
from .const import DATA_COMPONENT, ChatLogEventType
from .const import DATA_COMPONENT
from .entity import ConversationEntity
from .models import ConversationInput
@@ -37,8 +35,6 @@ def async_setup(hass: HomeAssistant) -> None:
websocket_api.async_register_command(hass, websocket_list_sentences)
websocket_api.async_register_command(hass, websocket_hass_agent_debug)
websocket_api.async_register_command(hass, websocket_hass_agent_language_scores)
websocket_api.async_register_command(hass, websocket_subscribe_chat_log)
websocket_api.async_register_command(hass, websocket_subscribe_chat_log_index)
@websocket_api.websocket_command(
@@ -269,114 +265,3 @@ class ConversationProcessView(http.HomeAssistantView):
)
return self.json(result.as_dict())
@websocket_api.websocket_command(
{
vol.Required("type"): "conversation/chat_log/subscribe",
vol.Required("conversation_id"): str,
}
)
@websocket_api.require_admin
def websocket_subscribe_chat_log(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to a chat log."""
msg_id = msg["id"]
subscribed_conversation = msg["conversation_id"]
chat_logs = hass.data.get(DATA_CHAT_LOGS)
if not chat_logs or subscribed_conversation not in chat_logs:
connection.send_error(
msg_id,
websocket_api.ERR_NOT_FOUND,
"Conversation chat log not found",
)
return
@callback
def forward_events(conversation_id: str, event_type: str, data: dict) -> None:
"""Forward chat log events to websocket connection."""
if conversation_id != subscribed_conversation:
return
connection.send_event(
msg_id,
{
"conversation_id": conversation_id,
"event_type": event_type,
"data": data,
},
)
if event_type == ChatLogEventType.DELETED:
unsubscribe()
del connection.subscriptions[msg_id]
unsubscribe = async_subscribe_chat_logs(hass, forward_events)
connection.subscriptions[msg_id] = unsubscribe
connection.send_result(msg_id)
with (
async_get_chat_session(hass, subscribed_conversation) as session,
async_get_chat_log(hass, session) as chat_log,
):
connection.send_event(
msg_id,
{
"event_type": ChatLogEventType.INITIAL_STATE,
"data": chat_log.as_dict(),
},
)
@websocket_api.websocket_command(
{
vol.Required("type"): "conversation/chat_log/subscribe_index",
}
)
@websocket_api.require_admin
def websocket_subscribe_chat_log_index(
hass: HomeAssistant,
connection: websocket_api.ActiveConnection,
msg: dict[str, Any],
) -> None:
"""Subscribe to a chat log."""
msg_id = msg["id"]
@callback
def forward_events(
conversation_id: str, event_type: ChatLogEventType, data: dict
) -> None:
"""Forward chat log events to websocket connection."""
if event_type not in (ChatLogEventType.CREATED, ChatLogEventType.DELETED):
return
connection.send_event(
msg_id,
{
"conversation_id": conversation_id,
"event_type": event_type,
"data": data,
},
)
unsubscribe = async_subscribe_chat_logs(hass, forward_events)
connection.subscriptions[msg["id"]] = unsubscribe
connection.send_result(msg["id"])
chat_logs = hass.data.get(DATA_CHAT_LOGS)
if not chat_logs:
return
connection.send_event(
msg_id,
{
"event_type": ChatLogEventType.INITIAL_STATE,
"data": [c.as_dict() for c in chat_logs.values()],
},
)

View File

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "entity",
"quality_scale": "internal",
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.24"]
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.7"]
}

View File

@@ -108,34 +108,5 @@
"toggle_cover_tilt": {
"service": "mdi:arrow-top-right-bottom-left"
}
},
"triggers": {
"awning_opened": {
"trigger": "mdi:awning-outline"
},
"blind_opened": {
"trigger": "mdi:blinds-horizontal"
},
"curtain_opened": {
"trigger": "mdi:curtains"
},
"door_opened": {
"trigger": "mdi:door-open"
},
"garage_opened": {
"trigger": "mdi:garage-open"
},
"gate_opened": {
"trigger": "mdi:gate-open"
},
"shade_opened": {
"trigger": "mdi:roller-shade"
},
"shutter_opened": {
"trigger": "mdi:window-shutter-open"
},
"window_opened": {
"trigger": "mdi:window-open"
}
}
}

View File

@@ -1,16 +1,4 @@
{
"common": {
"trigger_behavior_description_awning": "The behavior of the targeted awnings to trigger on.",
"trigger_behavior_description_blind": "The behavior of the targeted blinds to trigger on.",
"trigger_behavior_description_curtain": "The behavior of the targeted curtains to trigger on.",
"trigger_behavior_description_door": "The behavior of the targeted doors to trigger on.",
"trigger_behavior_description_garage": "The behavior of the targeted garage doors to trigger on.",
"trigger_behavior_description_gate": "The behavior of the targeted gates to trigger on.",
"trigger_behavior_description_shade": "The behavior of the targeted shades to trigger on.",
"trigger_behavior_description_shutter": "The behavior of the targeted shutters to trigger on.",
"trigger_behavior_description_window": "The behavior of the targeted windows to trigger on.",
"trigger_behavior_name": "Behavior"
},
"device_automation": {
"action_type": {
"close": "Close {entity_name}",
@@ -94,15 +82,6 @@
"name": "Window"
}
},
"selector": {
"trigger_behavior": {
"options": {
"any": "Any",
"first": "First",
"last": "Last"
}
}
},
"services": {
"close_cover": {
"description": "Closes a cover.",
@@ -157,142 +136,5 @@
"name": "Toggle tilt"
}
},
"title": "Cover",
"triggers": {
"awning_opened": {
"description": "Triggers when an awning opens.",
"description_configured": "[%key:component::cover::triggers::awning_opened::description%]",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::trigger_behavior_description_awning%]",
"name": "[%key:component::cover::common::trigger_behavior_name%]"
},
"fully_opened": {
"description": "Require the awnings to be fully opened before triggering.",
"name": "Fully opened"
}
},
"name": "When an awning opens"
},
"blind_opened": {
"description": "Triggers when a blind opens.",
"description_configured": "[%key:component::cover::triggers::blind_opened::description%]",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::trigger_behavior_description_blind%]",
"name": "[%key:component::cover::common::trigger_behavior_name%]"
},
"fully_opened": {
"description": "Require the blinds to be fully opened before triggering.",
"name": "Fully opened"
}
},
"name": "When a blind opens"
},
"curtain_opened": {
"description": "Triggers when a curtain opens.",
"description_configured": "[%key:component::cover::triggers::curtain_opened::description%]",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::trigger_behavior_description_curtain%]",
"name": "[%key:component::cover::common::trigger_behavior_name%]"
},
"fully_opened": {
"description": "Require the curtains to be fully opened before triggering.",
"name": "Fully opened"
}
},
"name": "When a curtain opens"
},
"door_opened": {
"description": "Triggers when a door opens.",
"description_configured": "[%key:component::cover::triggers::door_opened::description%]",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::trigger_behavior_description_door%]",
"name": "[%key:component::cover::common::trigger_behavior_name%]"
},
"fully_opened": {
"description": "Require the doors to be fully opened before triggering.",
"name": "Fully opened"
}
},
"name": "When a door opens"
},
"garage_opened": {
"description": "Triggers when a garage door opens.",
"description_configured": "[%key:component::cover::triggers::garage_opened::description%]",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::trigger_behavior_description_garage%]",
"name": "[%key:component::cover::common::trigger_behavior_name%]"
},
"fully_opened": {
"description": "Require the garage doors to be fully opened before triggering.",
"name": "Fully opened"
}
},
"name": "When a garage door opens"
},
"gate_opened": {
"description": "Triggers when a gate opens.",
"description_configured": "[%key:component::cover::triggers::gate_opened::description%]",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::trigger_behavior_description_gate%]",
"name": "[%key:component::cover::common::trigger_behavior_name%]"
},
"fully_opened": {
"description": "Require the gates to be fully opened before triggering.",
"name": "Fully opened"
}
},
"name": "When a gate opens"
},
"shade_opened": {
"description": "Triggers when a shade opens.",
"description_configured": "[%key:component::cover::triggers::shade_opened::description%]",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::trigger_behavior_description_shade%]",
"name": "[%key:component::cover::common::trigger_behavior_name%]"
},
"fully_opened": {
"description": "Require the shades to be fully opened before triggering.",
"name": "Fully opened"
}
},
"name": "When a shade opens"
},
"shutter_opened": {
"description": "Triggers when a shutter opens.",
"description_configured": "[%key:component::cover::triggers::shutter_opened::description%]",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::trigger_behavior_description_shutter%]",
"name": "[%key:component::cover::common::trigger_behavior_name%]"
},
"fully_opened": {
"description": "Require the shutters to be fully opened before triggering.",
"name": "Fully opened"
}
},
"name": "When a shutter opens"
},
"window_opened": {
"description": "Triggers when a window opens.",
"description_configured": "[%key:component::cover::triggers::window_opened::description%]",
"fields": {
"behavior": {
"description": "[%key:component::cover::common::trigger_behavior_description_window%]",
"name": "[%key:component::cover::common::trigger_behavior_name%]"
},
"fully_opened": {
"description": "Require the windows to be fully opened before triggering.",
"name": "Fully opened"
}
},
"name": "When a window opens"
}
}
"title": "Cover"
}

View File

@@ -1,116 +0,0 @@
"""Provides triggers for covers."""
from typing import Final
import voluptuous as vol
from homeassistant.const import CONF_OPTIONS
from homeassistant.core import HomeAssistant, State
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity import get_device_class
from homeassistant.helpers.trigger import (
ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST,
EntityTriggerBase,
Trigger,
TriggerConfig,
)
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from . import ATTR_CURRENT_POSITION, CoverDeviceClass, CoverState
from .const import DOMAIN
ATTR_FULLY_OPENED: Final = "fully_opened"
COVER_OPENED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend(
{
vol.Required(CONF_OPTIONS): {
vol.Required(ATTR_FULLY_OPENED, default=False): bool,
},
}
)
def get_device_class_or_undefined(
hass: HomeAssistant, entity_id: str
) -> str | None | UndefinedType:
"""Get the device class of an entity or UNDEFINED if not found."""
try:
return get_device_class(hass, entity_id)
except HomeAssistantError:
return UNDEFINED
class CoverOpenedClosedTrigger(EntityTriggerBase):
"""Class for cover opened and closed triggers."""
_attribute: str = ATTR_CURRENT_POSITION
_attribute_value: int | None = None
_device_class: CoverDeviceClass | None
_domain: str = DOMAIN
_to_states: set[str]
def is_to_state(self, state: State) -> bool:
"""Check if the state matches the target state."""
if state.state not in self._to_states:
return False
if (
self._attribute_value is not None
and (value := state.attributes.get(self._attribute)) is not None
and value != self._attribute_value
):
return False
return True
def entity_filter(self, entities: set[str]) -> set[str]:
"""Filter entities of this domain."""
entities = super().entity_filter(entities)
return {
entity_id
for entity_id in entities
if get_device_class_or_undefined(self._hass, entity_id)
== self._device_class
}
class CoverOpenedTrigger(CoverOpenedClosedTrigger):
"""Class for cover opened triggers."""
_schema = COVER_OPENED_TRIGGER_SCHEMA
_to_states = {CoverState.OPEN, CoverState.OPENING}
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
"""Initialize the state trigger."""
super().__init__(hass, config)
if self._options.get(ATTR_FULLY_OPENED):
self._attribute_value = 100
def make_cover_opened_trigger(
device_class: CoverDeviceClass | None,
) -> type[CoverOpenedTrigger]:
"""Create an entity state attribute trigger class."""
class CustomTrigger(CoverOpenedTrigger):
"""Trigger for entity state changes."""
_device_class = device_class
return CustomTrigger
TRIGGERS: dict[str, type[Trigger]] = {
"awning_opened": make_cover_opened_trigger(CoverDeviceClass.AWNING),
"blind_opened": make_cover_opened_trigger(CoverDeviceClass.BLIND),
"curtain_opened": make_cover_opened_trigger(CoverDeviceClass.CURTAIN),
"door_opened": make_cover_opened_trigger(CoverDeviceClass.DOOR),
"garage_opened": make_cover_opened_trigger(CoverDeviceClass.GARAGE),
"gate_opened": make_cover_opened_trigger(CoverDeviceClass.GATE),
"shade_opened": make_cover_opened_trigger(CoverDeviceClass.SHADE),
"shutter_opened": make_cover_opened_trigger(CoverDeviceClass.SHUTTER),
"window_opened": make_cover_opened_trigger(CoverDeviceClass.WINDOW),
}
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for covers."""
return TRIGGERS
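A minimal sketch of how the factory output is keyed; the assertion is illustrative, not part of the component:
garage_trigger_cls = TRIGGERS["garage_opened"]
assert issubclass(garage_trigger_cls, CoverOpenedTrigger)
# With fully_opened enabled, is_to_state() only matches once the cover reports
# ATTR_CURRENT_POSITION == 100.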

View File

@@ -1,79 +0,0 @@
.trigger_common_fields: &trigger_common_fields
behavior:
required: true
default: any
selector:
select:
translation_key: trigger_behavior
options:
- first
- last
- any
fully_opened:
required: true
default: false
selector:
boolean:
awning_opened:
fields: *trigger_common_fields
target:
entity:
domain: cover
device_class: awning
blind_opened:
fields: *trigger_common_fields
target:
entity:
domain: cover
device_class: blind
curtain_opened:
fields: *trigger_common_fields
target:
entity:
domain: cover
device_class: curtain
door_opened:
fields: *trigger_common_fields
target:
entity:
domain: cover
device_class: door
garage_opened:
fields: *trigger_common_fields
target:
entity:
domain: cover
device_class: garage
gate_opened:
fields: *trigger_common_fields
target:
entity:
domain: cover
device_class: gate
shade_opened:
fields: *trigger_common_fields
target:
entity:
domain: cover
device_class: shade
shutter_opened:
fields: *trigger_common_fields
target:
entity:
domain: cover
device_class: shutter
window_opened:
fields: *trigger_common_fields
target:
entity:
domain: cover
device_class: window

View File

@@ -6,5 +6,5 @@
"integration_type": "service",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["debugpy==1.8.17"]
"requirements": ["debugpy==1.8.16"]
}

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.11", "deebot-client==16.4.0"]
"requirements": ["py-sucks==0.9.11", "deebot-client==16.3.0"]
}

View File

@@ -278,18 +278,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElkM1ConfigEntry) -> boo
for keypad in elk.keypads:
keypad.add_callback(_keypad_changed)
sync_success = False
try:
await ElkSyncWaiter(elk, LOGIN_TIMEOUT, SYNC_TIMEOUT).async_wait()
sync_success = True
except LoginFailed:
_LOGGER.error("ElkM1 login failed for %s", conf[CONF_HOST])
return False
if not await async_wait_for_elk_to_sync(elk, LOGIN_TIMEOUT, SYNC_TIMEOUT):
return False
except TimeoutError as exc:
raise ConfigEntryNotReady(f"Timed out connecting to {conf[CONF_HOST]}") from exc
finally:
if not sync_success:
elk.disconnect()
elk_temp_unit = elk.panel.temperature_units
if elk_temp_unit == "C":
@@ -328,75 +321,48 @@ async def async_unload_entry(hass: HomeAssistant, entry: ElkM1ConfigEntry) -> bo
return unload_ok
class LoginFailed(Exception):
"""Raised when login to ElkM1 fails."""
async def async_wait_for_elk_to_sync(
elk: Elk,
login_timeout: int,
sync_timeout: int,
) -> bool:
"""Wait until the elk has finished sync. Can fail login or timeout."""
sync_event = asyncio.Event()
login_event = asyncio.Event()
class ElkSyncWaiter:
"""Wait for ElkM1 to sync."""
success = True
def __init__(self, elk: Elk, login_timeout: int, sync_timeout: int) -> None:
"""Initialize the sync waiter."""
self._elk = elk
self._login_timeout = login_timeout
self._sync_timeout = sync_timeout
self._loop = asyncio.get_running_loop()
self._sync_future: asyncio.Future[None] = self._loop.create_future()
self._login_future: asyncio.Future[None] = self._loop.create_future()
def login_status(succeeded: bool) -> None:
nonlocal success
@callback
def _async_set_future_if_not_done(self, future: asyncio.Future[None]) -> None:
"""Set the future result if not already done."""
if not future.done():
future.set_result(None)
@callback
def _async_login_status(self, succeeded: bool) -> None:
"""Handle login status callback."""
success = succeeded
if succeeded:
_LOGGER.debug("ElkM1 login succeeded")
self._async_set_future_if_not_done(self._login_future)
login_event.set()
else:
elk.disconnect()
_LOGGER.error("ElkM1 login failed; invalid username or password")
self._async_set_exception_if_not_done(self._login_future, LoginFailed)
login_event.set()
sync_event.set()
@callback
def _async_set_exception_if_not_done(
self, future: asyncio.Future[None], exception: type[Exception]
) -> None:
"""Set an exception on the future if not already done."""
if not future.done():
future.set_exception(exception())
@callback
def _async_sync_complete(self) -> None:
"""Handle sync complete callback."""
self._async_set_future_if_not_done(self._sync_future)
async def async_wait(self) -> None:
"""Wait for login and sync to complete.
Raises LoginFailed if login fails.
Raises TimeoutError if login or sync times out.
"""
self._elk.add_handler("login", self._async_login_status)
self._elk.add_handler("sync_complete", self._async_sync_complete)
def sync_complete() -> None:
sync_event.set()
elk.add_handler("login", login_status)
elk.add_handler("sync_complete", sync_complete)
for name, event, timeout in (
("login", login_event, login_timeout),
("sync_complete", sync_event, sync_timeout),
):
_LOGGER.debug("Waiting for %s event for %s seconds", name, timeout)
try:
for name, future, timeout in (
("login", self._login_future, self._login_timeout),
("sync_complete", self._sync_future, self._sync_timeout),
):
_LOGGER.debug("Waiting for %s event for %s seconds", name, timeout)
handle = self._loop.call_later(
timeout, self._async_set_exception_if_not_done, future, TimeoutError
)
try:
await future
finally:
handle.cancel()
async with asyncio.timeout(timeout):
await event.wait()
except TimeoutError:
_LOGGER.debug("Timed out waiting for %s event", name)
elk.disconnect()
raise
_LOGGER.debug("Received %s event", name)
_LOGGER.debug("Received %s event", name)
finally:
self._elk.remove_handler("login", self._async_login_status)
self._elk.remove_handler("sync_complete", self._async_sync_complete)
return success
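Sketch of the intended call pattern (mirrors async_setup_entry and the config flow):
# await ElkSyncWaiter(elk, LOGIN_TIMEOUT, SYNC_TIMEOUT).async_wait()
# Raises LoginFailed on bad credentials and TimeoutError if the login or
# sync_complete event does not arrive within its timeout.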

View File

@@ -25,7 +25,7 @@ from homeassistant.helpers.typing import DiscoveryInfoType, VolDictType
from homeassistant.util import slugify
from homeassistant.util.network import is_ip_address
from . import ElkSyncWaiter, LoginFailed, hostname_from_url
from . import async_wait_for_elk_to_sync, hostname_from_url
from .const import CONF_AUTO_CONFIGURE, DISCOVER_SCAN_TIMEOUT, DOMAIN, LOGIN_TIMEOUT
from .discovery import (
_short_mac,
@@ -89,9 +89,8 @@ async def validate_input(data: dict[str, str], mac: str | None) -> dict[str, str
elk.connect()
try:
await ElkSyncWaiter(elk, LOGIN_TIMEOUT, VALIDATE_TIMEOUT).async_wait()
except LoginFailed as exc:
raise InvalidAuth from exc
if not await async_wait_for_elk_to_sync(elk, LOGIN_TIMEOUT, VALIDATE_TIMEOUT):
raise InvalidAuth
finally:
elk.disconnect()

View File

@@ -15,5 +15,5 @@
"documentation": "https://www.home-assistant.io/integrations/elkm1",
"iot_class": "local_push",
"loggers": ["elkm1_lib"],
"requirements": ["elkm1-lib==2.2.13"]
"requirements": ["elkm1-lib==2.2.12"]
}

View File

@@ -5,7 +5,7 @@ from __future__ import annotations
import asyncio
from collections import Counter
from collections.abc import Awaitable, Callable
from typing import Any, Literal, NotRequired, TypedDict
from typing import Literal, NotRequired, TypedDict
import voluptuous as vol
@@ -15,7 +15,6 @@ from homeassistant.helpers import config_validation as cv, singleton, storage
from .const import DOMAIN
STORAGE_VERSION = 1
STORAGE_MINOR_VERSION = 2
STORAGE_KEY = DOMAIN
@@ -165,7 +164,6 @@ class EnergyPreferences(TypedDict):
energy_sources: list[SourceType]
device_consumption: list[DeviceConsumption]
device_consumption_water: NotRequired[list[DeviceConsumption]]
class EnergyPreferencesUpdate(EnergyPreferences, total=False):
@@ -330,31 +328,14 @@ DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
)
class _EnergyPreferencesStore(storage.Store[EnergyPreferences]):
"""Energy preferences store with migration support."""
async def _async_migrate_func(
self,
old_major_version: int,
old_minor_version: int,
old_data: dict[str, Any],
) -> dict[str, Any]:
"""Migrate to the new version."""
data = old_data
if old_major_version == 1 and old_minor_version < 2:
# Add device_consumption_water field if it doesn't exist
data.setdefault("device_consumption_water", [])
return data
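An illustrative migration, assuming a 1.1 payload without the water key:
# old_payload = {"energy_sources": [], "device_consumption": []}
# Awaiting _async_migrate_func(1, 1, old_payload) returns the same dict with
# "device_consumption_water": [] added; 1.2 payloads pass through unchanged.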
class EnergyManager:
"""Manage the instance energy prefs."""
def __init__(self, hass: HomeAssistant) -> None:
"""Initialize energy manager."""
self._hass = hass
self._store = _EnergyPreferencesStore(
hass, STORAGE_VERSION, STORAGE_KEY, minor_version=STORAGE_MINOR_VERSION
self._store = storage.Store[EnergyPreferences](
hass, STORAGE_VERSION, STORAGE_KEY
)
self.data: EnergyPreferences | None = None
self._update_listeners: list[Callable[[], Awaitable]] = []
@@ -369,7 +350,6 @@ class EnergyManager:
return {
"energy_sources": [],
"device_consumption": [],
"device_consumption_water": [],
}
async def async_update(self, update: EnergyPreferencesUpdate) -> None:
@@ -382,7 +362,6 @@ class EnergyManager:
for key in (
"energy_sources",
"device_consumption",
"device_consumption_water",
):
if key in update:
data[key] = update[key]

View File

@@ -153,9 +153,6 @@ class EnergyPreferencesValidation:
energy_sources: list[ValidationIssues] = dataclasses.field(default_factory=list)
device_consumption: list[ValidationIssues] = dataclasses.field(default_factory=list)
device_consumption_water: list[ValidationIssues] = dataclasses.field(
default_factory=list
)
def as_dict(self) -> dict:
"""Return dictionary version."""
@@ -168,10 +165,6 @@ class EnergyPreferencesValidation:
[dataclasses.asdict(issue) for issue in issues.issues.values()]
for issues in self.device_consumption
],
"device_consumption_water": [
[dataclasses.asdict(issue) for issue in issues.issues.values()]
for issues in self.device_consumption_water
],
}
@@ -749,23 +742,6 @@ async def async_validate(hass: HomeAssistant) -> EnergyPreferencesValidation:
)
)
for device in manager.data.get("device_consumption_water", []):
device_result = ValidationIssues()
result.device_consumption_water.append(device_result)
wanted_statistics_metadata.add(device["stat_consumption"])
validate_calls.append(
functools.partial(
_async_validate_usage_stat,
hass,
statistics_metadata,
device["stat_consumption"],
WATER_USAGE_DEVICE_CLASSES,
WATER_USAGE_UNITS,
WATER_UNIT_ERROR,
device_result,
)
)
# Fetch the needed statistics metadata
statistics_metadata.update(
await recorder.get_instance(hass).async_add_executor_job(

View File

@@ -129,7 +129,6 @@ def ws_get_prefs(
vol.Required("type"): "energy/save_prefs",
vol.Optional("energy_sources"): ENERGY_SOURCE_SCHEMA,
vol.Optional("device_consumption"): [DEVICE_CONSUMPTION_SCHEMA],
vol.Optional("device_consumption_water"): [DEVICE_CONSUMPTION_SCHEMA],
}
)
@websocket_api.async_response

View File

@@ -1,401 +0,0 @@
"""The EnergyID integration."""
from __future__ import annotations
from dataclasses import dataclass
import datetime as dt
from datetime import timedelta
import functools
import logging
from aiohttp import ClientError, ClientResponseError
from energyid_webhooks.client_v2 import WebhookClient
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
from homeassistant.core import (
CALLBACK_TYPE,
Event,
EventStateChangedData,
HomeAssistant,
callback,
)
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import (
async_track_entity_registry_updated_event,
async_track_state_change_event,
async_track_time_interval,
)
from .const import (
CONF_DEVICE_ID,
CONF_DEVICE_NAME,
CONF_ENERGYID_KEY,
CONF_HA_ENTITY_UUID,
CONF_PROVISIONING_KEY,
CONF_PROVISIONING_SECRET,
DOMAIN,
)
_LOGGER = logging.getLogger(__name__)
type EnergyIDConfigEntry = ConfigEntry[EnergyIDRuntimeData]
DEFAULT_UPLOAD_INTERVAL_SECONDS = 60
@dataclass
class EnergyIDRuntimeData:
"""Runtime data for the EnergyID integration."""
client: WebhookClient
mappings: dict[str, str]
state_listener: CALLBACK_TYPE | None = None
registry_tracking_listener: CALLBACK_TYPE | None = None
unavailable_logged: bool = False
async def async_setup_entry(hass: HomeAssistant, entry: EnergyIDConfigEntry) -> bool:
"""Set up EnergyID from a config entry."""
session = async_get_clientsession(hass)
client = WebhookClient(
provisioning_key=entry.data[CONF_PROVISIONING_KEY],
provisioning_secret=entry.data[CONF_PROVISIONING_SECRET],
device_id=entry.data[CONF_DEVICE_ID],
device_name=entry.data[CONF_DEVICE_NAME],
session=session,
)
entry.runtime_data = EnergyIDRuntimeData(
client=client,
mappings={},
)
is_claimed = None
try:
is_claimed = await client.authenticate()
except TimeoutError as err:
raise ConfigEntryNotReady(
f"Timeout authenticating with EnergyID: {err}"
) from err
except ClientResponseError as err:
# 401/403 = invalid credentials, trigger reauth
if err.status in (401, 403):
raise ConfigEntryAuthFailed(f"Invalid credentials: {err}") from err
# Other HTTP errors are likely temporary
raise ConfigEntryNotReady(
f"HTTP error authenticating with EnergyID: {err}"
) from err
except ClientError as err:
# Network/connection errors are temporary
raise ConfigEntryNotReady(
f"Connection error authenticating with EnergyID: {err}"
) from err
except Exception as err:
# Unknown errors - log and retry (safer than forcing reauth)
_LOGGER.exception("Unexpected error during EnergyID authentication")
raise ConfigEntryNotReady(
f"Unexpected error authenticating with EnergyID: {err}"
) from err
if not is_claimed:
# Device exists but not claimed = user needs to claim it = auth issue
raise ConfigEntryAuthFailed("Device is not claimed. Please re-authenticate.")
_LOGGER.debug("EnergyID device '%s' authenticated successfully", client.device_name)
async def _async_synchronize_sensors(now: dt.datetime | None = None) -> None:
"""Callback for periodically synchronizing sensor data."""
try:
await client.synchronize_sensors()
if entry.runtime_data.unavailable_logged:
_LOGGER.debug("Connection to EnergyID re-established")
entry.runtime_data.unavailable_logged = False
except (OSError, RuntimeError) as err:
if not entry.runtime_data.unavailable_logged:
_LOGGER.debug("EnergyID is unavailable: %s", err)
entry.runtime_data.unavailable_logged = True
upload_interval = DEFAULT_UPLOAD_INTERVAL_SECONDS
if client.webhook_policy:
upload_interval = client.webhook_policy.get(
"uploadInterval", DEFAULT_UPLOAD_INTERVAL_SECONDS
)
# Schedule the callback and automatically unsubscribe when the entry is unloaded.
entry.async_on_unload(
async_track_time_interval(
hass, _async_synchronize_sensors, timedelta(seconds=upload_interval)
)
)
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
update_listeners(hass, entry)
_LOGGER.debug(
"Starting EnergyID background sync for '%s'",
client.device_name,
)
return True
async def config_entry_update_listener(
hass: HomeAssistant, entry: EnergyIDConfigEntry
) -> None:
"""Handle config entry updates, including subentry changes."""
_LOGGER.debug("Config entry updated for %s, reloading listeners", entry.entry_id)
update_listeners(hass, entry)
@callback
def update_listeners(hass: HomeAssistant, entry: EnergyIDConfigEntry) -> None:
"""Set up or update state listeners and queue initial states."""
runtime_data = entry.runtime_data
client = runtime_data.client
# Clean up old state listener
if runtime_data.state_listener:
runtime_data.state_listener()
runtime_data.state_listener = None
mappings: dict[str, str] = {}
entities_to_track: list[str] = []
old_mappings = set(runtime_data.mappings.keys())
new_mappings = set()
ent_reg = er.async_get(hass)
subentries = list(entry.subentries.values())
_LOGGER.debug(
"Found %d subentries in entry.subentries: %s",
len(subentries),
[s.data for s in subentries],
)
# Build current entity mappings
tracked_entity_ids = []
for subentry in subentries:
entity_uuid = subentry.data.get(CONF_HA_ENTITY_UUID)
energyid_key = subentry.data.get(CONF_ENERGYID_KEY)
if not (entity_uuid and energyid_key):
continue
entity_entry = ent_reg.async_get(entity_uuid)
if not entity_entry:
_LOGGER.warning(
"Entity with UUID %s does not exist, skipping mapping to %s",
entity_uuid,
energyid_key,
)
continue
ha_entity_id = entity_entry.entity_id
tracked_entity_ids.append(ha_entity_id)
if not hass.states.get(ha_entity_id):
# Entity exists in registry but is not present in the state machine
_LOGGER.debug(
"Entity %s does not exist in state machine yet, will track when available (mapping to %s)",
ha_entity_id,
energyid_key,
)
# Still add to entities_to_track so we can handle it when state appears
entities_to_track.append(ha_entity_id)
continue
mappings[ha_entity_id] = energyid_key
entities_to_track.append(ha_entity_id)
new_mappings.add(ha_entity_id)
client.get_or_create_sensor(energyid_key)
if ha_entity_id not in old_mappings:
_LOGGER.debug(
"New mapping detected for %s, queuing initial state", ha_entity_id
)
if (
current_state := hass.states.get(ha_entity_id)
) and current_state.state not in (
STATE_UNKNOWN,
STATE_UNAVAILABLE,
):
try:
value = float(current_state.state)
timestamp = current_state.last_updated or dt.datetime.now(dt.UTC)
client.get_or_create_sensor(energyid_key).update(value, timestamp)
except (ValueError, TypeError):
_LOGGER.debug(
"Could not convert initial state of %s to float: %s",
ha_entity_id,
current_state.state,
)
# Clean up old entity registry listener
if runtime_data.registry_tracking_listener:
runtime_data.registry_tracking_listener()
runtime_data.registry_tracking_listener = None
# Set up listeners for entity registry changes
if tracked_entity_ids:
_LOGGER.debug("Setting up entity registry tracking for: %s", tracked_entity_ids)
def _handle_entity_registry_change(
event: Event[er.EventEntityRegistryUpdatedData],
) -> None:
"""Handle entity registry changes for our tracked entities."""
_LOGGER.debug("Registry event for tracked entity: %s", event.data)
if event.data["action"] == "update":
# Type is now narrowed to _EventEntityRegistryUpdatedData_Update
if "entity_id" in event.data["changes"]:
old_entity_id = event.data["changes"]["entity_id"]
new_entity_id = event.data["entity_id"]
_LOGGER.debug(
"Tracked entity ID changed: %s -> %s",
old_entity_id,
new_entity_id,
)
# Entity ID changed, need to reload listeners to track new ID
update_listeners(hass, entry)
elif event.data["action"] == "remove":
_LOGGER.debug("Tracked entity removed: %s", event.data["entity_id"])
# reminder: Create repair issue to notify user about removed entity
update_listeners(hass, entry)
# Track the specific entity IDs we care about
unsub_entity_registry = async_track_entity_registry_updated_event(
hass, tracked_entity_ids, _handle_entity_registry_change
)
runtime_data.registry_tracking_listener = unsub_entity_registry
if removed_mappings := old_mappings - new_mappings:
_LOGGER.debug("Removed mappings: %s", ", ".join(removed_mappings))
runtime_data.mappings = mappings
if not entities_to_track:
_LOGGER.debug(
"No valid sensor mappings configured for '%s'", client.device_name
)
return
unsub_state_change = async_track_state_change_event(
hass,
entities_to_track,
functools.partial(_async_handle_state_change, hass, entry.entry_id),
)
runtime_data.state_listener = unsub_state_change
_LOGGER.debug(
"Now tracking state changes for %d entities for '%s': %s",
len(entities_to_track),
client.device_name,
entities_to_track,
)
@callback
def _async_handle_state_change(
hass: HomeAssistant, entry_id: str, event: Event[EventStateChangedData]
) -> None:
"""Handle state changes for tracked entities."""
entity_id = event.data["entity_id"]
new_state = event.data["new_state"]
_LOGGER.debug(
"State change detected for entity: %s, new value: %s",
entity_id,
new_state.state if new_state else "None",
)
if not new_state or new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
return
entry = hass.config_entries.async_get_entry(entry_id)
if not entry or not hasattr(entry, "runtime_data"):
# Entry is being unloaded or not yet fully initialized
return
runtime_data = entry.runtime_data
client = runtime_data.client
# Check if entity is already mapped
if energyid_key := runtime_data.mappings.get(entity_id):
# Entity already mapped, just update value
_LOGGER.debug(
"Updating EnergyID sensor %s with value %s", energyid_key, new_state.state
)
else:
# Entity not mapped yet - check if it should be (handles late-appearing entities)
ent_reg = er.async_get(hass)
for subentry in entry.subentries.values():
entity_uuid = subentry.data.get(CONF_HA_ENTITY_UUID)
energyid_key_candidate = subentry.data.get(CONF_ENERGYID_KEY)
if not (entity_uuid and energyid_key_candidate):
continue
entity_entry = ent_reg.async_get(entity_uuid)
if entity_entry and entity_entry.entity_id == entity_id:
# Found it! Add to mappings and send initial value
energyid_key = energyid_key_candidate
runtime_data.mappings[entity_id] = energyid_key
client.get_or_create_sensor(energyid_key)
_LOGGER.debug(
"Entity %s now available in state machine, adding to mappings (key: %s)",
entity_id,
energyid_key,
)
break
else:
# Not a tracked entity, ignore
return
try:
value = float(new_state.state)
except (ValueError, TypeError):
return
client.get_or_create_sensor(energyid_key).update(value, new_state.last_updated)
async def async_unload_entry(hass: HomeAssistant, entry: EnergyIDConfigEntry) -> bool:
"""Unload a config entry."""
_LOGGER.debug("Unloading EnergyID entry for %s", entry.title)
try:
# Unload subentries if present (guarded for test and reload scenarios)
if hasattr(hass.config_entries, "async_entries") and hasattr(entry, "entry_id"):
subentries = [
e.entry_id
for e in hass.config_entries.async_entries(DOMAIN)
if getattr(e, "parent_entry", None) == entry.entry_id
]
for subentry_id in subentries:
await hass.config_entries.async_unload(subentry_id)
# Only clean up listeners and client if runtime_data is present
if hasattr(entry, "runtime_data"):
runtime_data = entry.runtime_data
# Remove state listener
if runtime_data.state_listener:
runtime_data.state_listener()
# Remove registry tracking listener
if runtime_data.registry_tracking_listener:
runtime_data.registry_tracking_listener()
try:
await runtime_data.client.close()
except Exception:
_LOGGER.exception("Error closing EnergyID client for %s", entry.title)
del entry.runtime_data
except Exception:
_LOGGER.exception("Error during async_unload_entry for %s", entry.title)
return False
return True

View File

@@ -1,293 +0,0 @@
"""Config flow for EnergyID integration."""
import asyncio
from collections.abc import Mapping
import logging
from typing import Any
from aiohttp import ClientError, ClientResponseError
from energyid_webhooks.client_v2 import WebhookClient
import voluptuous as vol
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
ConfigSubentryFlow,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.instance_id import async_get as async_get_instance_id
from .const import (
CONF_DEVICE_ID,
CONF_DEVICE_NAME,
CONF_PROVISIONING_KEY,
CONF_PROVISIONING_SECRET,
DOMAIN,
ENERGYID_DEVICE_ID_FOR_WEBHOOK_PREFIX,
MAX_POLLING_ATTEMPTS,
NAME,
POLLING_INTERVAL,
)
from .energyid_sensor_mapping_flow import EnergyIDSensorMappingFlowHandler
_LOGGER = logging.getLogger(__name__)
class EnergyIDConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle the configuration flow for the EnergyID integration."""
def __init__(self) -> None:
"""Initialize the config flow."""
self._flow_data: dict[str, Any] = {}
self._polling_task: asyncio.Task | None = None
async def _perform_auth_and_get_details(self) -> str | None:
"""Authenticate with EnergyID and retrieve device details."""
_LOGGER.debug("Starting authentication with EnergyID")
client = WebhookClient(
provisioning_key=self._flow_data[CONF_PROVISIONING_KEY],
provisioning_secret=self._flow_data[CONF_PROVISIONING_SECRET],
device_id=self._flow_data[CONF_DEVICE_ID],
device_name=self._flow_data[CONF_DEVICE_NAME],
session=async_get_clientsession(self.hass),
)
try:
is_claimed = await client.authenticate()
except ClientResponseError as err:
if err.status == 401:
_LOGGER.debug("Invalid provisioning key or secret")
return "invalid_auth"
_LOGGER.debug(
"Client response error during EnergyID authentication: %s", err
)
return "cannot_connect"
except ClientError as err:
_LOGGER.debug(
"Failed to connect to EnergyID during authentication: %s", err
)
return "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected error during EnergyID authentication")
return "unknown_auth_error"
else:
_LOGGER.debug("Authentication successful, claimed: %s", is_claimed)
if is_claimed:
self._flow_data["record_number"] = client.recordNumber
self._flow_data["record_name"] = client.recordName
_LOGGER.debug(
"Device claimed with record number: %s, record name: %s",
client.recordNumber,
client.recordName,
)
return None
self._flow_data["claim_info"] = client.get_claim_info()
self._flow_data["claim_info"]["integration_name"] = NAME
_LOGGER.debug(
"Device needs claim, claim info: %s", self._flow_data["claim_info"]
)
return "needs_claim"
async def _async_poll_for_claim(self) -> None:
"""Poll EnergyID to check if device has been claimed."""
for _attempt in range(1, MAX_POLLING_ATTEMPTS + 1):
await asyncio.sleep(POLLING_INTERVAL)
auth_status = await self._perform_auth_and_get_details()
if auth_status is None:
# Device claimed - advance flow to async_step_create_entry
_LOGGER.debug("Device claimed, advancing to create entry")
self.hass.async_create_task(
self.hass.config_entries.flow.async_configure(self.flow_id)
)
return
if auth_status != "needs_claim":
# Stop polling on non-transient errors
# No user notification needed here as the error will be handled
# in the next flow step when the user continues the flow
_LOGGER.debug("Polling stopped due to error: %s", auth_status)
return
_LOGGER.debug("Polling timeout after %s attempts", MAX_POLLING_ATTEMPTS)
# No user notification here because:
# 1. User may still be completing the claim process in EnergyID portal
# 2. Immediate notification could interrupt their workflow or cause confusion
# 3. When user clicks "Submit" to continue, the flow validates claim status
# and will show appropriate error/success messages based on current state
# 4. Timeout allows graceful fallback: user can retry claim or see proper error
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step of the configuration flow."""
_LOGGER.debug("Starting user step with input: %s", user_input)
errors: dict[str, str] = {}
if user_input is not None:
instance_id = await async_get_instance_id(self.hass)
# Note: This device_id is for EnergyID's webhook system, not related to HA's device registry
device_suffix = f"{int(asyncio.get_event_loop().time() * 1000)}"
device_id = (
f"{ENERGYID_DEVICE_ID_FOR_WEBHOOK_PREFIX}{instance_id}_{device_suffix}"
)
self._flow_data = {
**user_input,
CONF_DEVICE_ID: device_id,
CONF_DEVICE_NAME: self.hass.config.location_name,
}
_LOGGER.debug("Flow data after user input: %s", self._flow_data)
auth_status = await self._perform_auth_and_get_details()
if auth_status is None:
await self.async_set_unique_id(device_id)
self._abort_if_unique_id_configured()
_LOGGER.debug(
"Creating entry with title: %s", self._flow_data["record_name"]
)
return self.async_create_entry(
title=self._flow_data["record_name"],
data=self._flow_data,
description="add_sensor_mapping_hint",
description_placeholders={"integration_name": NAME},
)
if auth_status == "needs_claim":
_LOGGER.debug("Redirecting to auth and claim step")
return await self.async_step_auth_and_claim()
errors["base"] = auth_status
_LOGGER.debug("Errors encountered during user step: %s", errors)
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_PROVISIONING_KEY): str,
vol.Required(CONF_PROVISIONING_SECRET): cv.string,
}
),
errors=errors,
description_placeholders={
"docs_url": "https://app.energyid.eu/integrations/home-assistant",
"integration_name": NAME,
},
)
async def async_step_auth_and_claim(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the step for device claiming using external step with polling."""
_LOGGER.debug("Starting auth and claim step with input: %s", user_input)
claim_info = self._flow_data.get("claim_info", {})
# Start polling when we first enter this step
if self._polling_task is None:
self._polling_task = self.hass.async_create_task(
self._async_poll_for_claim()
)
# Show external step to open the EnergyID website
return self.async_external_step(
step_id="auth_and_claim",
url=claim_info.get("claim_url", ""),
description_placeholders=claim_info,
)
# Check if device has been claimed
auth_status = await self._perform_auth_and_get_details()
if auth_status is None:
# Device has been claimed
if self._polling_task and not self._polling_task.done():
self._polling_task.cancel()
self._polling_task = None
return self.async_external_step_done(next_step_id="create_entry")
# Device not claimed yet, show the external step again
if self._polling_task and not self._polling_task.done():
self._polling_task.cancel()
self._polling_task = None
return self.async_external_step(
step_id="auth_and_claim",
url=claim_info.get("claim_url", ""),
description_placeholders=claim_info,
)
async def async_step_create_entry(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Final step to create the entry after successful claim."""
_LOGGER.debug("Creating entry with title: %s", self._flow_data["record_name"])
return self.async_create_entry(
title=self._flow_data["record_name"],
data=self._flow_data,
description="add_sensor_mapping_hint",
description_placeholders={"integration_name": NAME},
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauthentication upon an API authentication error."""
# Note: This device_id is for EnergyID's webhook system, not related to HA's device registry
self._flow_data = {
CONF_DEVICE_ID: entry_data[CONF_DEVICE_ID],
CONF_DEVICE_NAME: entry_data[CONF_DEVICE_NAME],
}
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm reauthentication dialog."""
errors: dict[str, str] = {}
if user_input is not None:
self._flow_data.update(user_input)
auth_status = await self._perform_auth_and_get_details()
if auth_status is None:
# Authentication successful and claimed
await self.async_set_unique_id(self._flow_data["record_number"])
self._abort_if_unique_id_mismatch(reason="wrong_account")
return self.async_update_reload_and_abort(
self._get_reauth_entry(),
data_updates={
CONF_PROVISIONING_KEY: user_input[CONF_PROVISIONING_KEY],
CONF_PROVISIONING_SECRET: user_input[CONF_PROVISIONING_SECRET],
},
)
if auth_status == "needs_claim":
return await self.async_step_auth_and_claim()
errors["base"] = auth_status
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema(
{
vol.Required(CONF_PROVISIONING_KEY): str,
vol.Required(CONF_PROVISIONING_SECRET): cv.string,
}
),
errors=errors,
description_placeholders={
"docs_url": "https://app.energyid.eu/integrations/home-assistant",
"integration_name": NAME,
},
)
@classmethod
@callback
def async_get_supported_subentry_types(
cls, config_entry: ConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this integration."""
return {"sensor_mapping": EnergyIDSensorMappingFlowHandler}

View File

@@ -1,21 +0,0 @@
"""Constants for the EnergyID integration."""
from typing import Final
DOMAIN: Final = "energyid"
NAME: Final = "EnergyID"
# --- Config Flow and Entry Data ---
CONF_PROVISIONING_KEY: Final = "provisioning_key"
CONF_PROVISIONING_SECRET: Final = "provisioning_secret"
CONF_DEVICE_ID: Final = "device_id"
CONF_DEVICE_NAME: Final = "device_name"
# --- Subentry (Mapping) Data ---
CONF_HA_ENTITY_UUID: Final = "ha_entity_uuid"
CONF_ENERGYID_KEY: Final = "energyid_key"
# --- Webhook and Polling Configuration ---
ENERGYID_DEVICE_ID_FOR_WEBHOOK_PREFIX: Final = "homeassistant_eid_"
POLLING_INTERVAL: Final = 2 # seconds
MAX_POLLING_ATTEMPTS: Final = 60 # 2 minutes total
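
Taken together, the two polling constants above define the claim-polling budget referenced by the "2 minutes total" comment; a quick check of the arithmetic:

# Values copied from the constants above; total time the background
# claim-polling task will wait before giving up.
POLLING_INTERVAL = 2        # seconds between claim checks
MAX_POLLING_ATTEMPTS = 60   # checks before polling stops
print(POLLING_INTERVAL * MAX_POLLING_ATTEMPTS)  # 120 seconds, i.e. 2 minutes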

View File

@@ -1,156 +0,0 @@
"""Subentry flow for EnergyID integration, handling sensor mapping management."""
import logging
from typing import Any
import voluptuous as vol
from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
from homeassistant.config_entries import ConfigSubentryFlow, SubentryFlowResult
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.selector import EntitySelector, EntitySelectorConfig
from .const import CONF_ENERGYID_KEY, CONF_HA_ENTITY_UUID, DOMAIN, NAME
_LOGGER = logging.getLogger(__name__)
@callback
def _get_suggested_entities(hass: HomeAssistant) -> list[str]:
"""Return a sorted list of suggested sensor entity IDs for mapping."""
ent_reg = er.async_get(hass)
suitable_entities = []
for entity_entry in ent_reg.entities.values():
if not (
entity_entry.domain == Platform.SENSOR and entity_entry.platform != DOMAIN
):
continue
if not hass.states.get(entity_entry.entity_id):
continue
state_class = (entity_entry.capabilities or {}).get("state_class")
has_numeric_indicators = (
state_class
in (
SensorStateClass.MEASUREMENT,
SensorStateClass.TOTAL,
SensorStateClass.TOTAL_INCREASING,
)
or entity_entry.device_class
in (
SensorDeviceClass.ENERGY,
SensorDeviceClass.GAS,
SensorDeviceClass.POWER,
SensorDeviceClass.TEMPERATURE,
SensorDeviceClass.VOLUME,
)
or entity_entry.original_device_class
in (
SensorDeviceClass.ENERGY,
SensorDeviceClass.GAS,
SensorDeviceClass.POWER,
SensorDeviceClass.TEMPERATURE,
SensorDeviceClass.VOLUME,
)
)
if has_numeric_indicators:
suitable_entities.append(entity_entry.entity_id)
return sorted(suitable_entities)
@callback
def _validate_mapping_input(
ha_entity_id: str | None,
current_mappings: set[str],
ent_reg: er.EntityRegistry,
) -> dict[str, str]:
"""Validate mapping input and return errors if any."""
errors: dict[str, str] = {}
if not ha_entity_id:
errors["base"] = "entity_required"
return errors
# Check if entity exists
entity_entry = ent_reg.async_get(ha_entity_id)
if not entity_entry:
errors["base"] = "entity_not_found"
return errors
# Check if entity is already mapped (by UUID)
entity_uuid = entity_entry.id
if entity_uuid in current_mappings:
errors["base"] = "entity_already_mapped"
return errors
class EnergyIDSensorMappingFlowHandler(ConfigSubentryFlow):
"""Handle EnergyID sensor mapping subentry flow for adding new mappings."""
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Handle the user step for adding a new sensor mapping."""
errors: dict[str, str] = {}
config_entry = self._get_entry()
ent_reg = er.async_get(self.hass)
if user_input is not None:
ha_entity_id = user_input.get("ha_entity_id")
# Get current mappings by UUID
current_mappings = {
uuid
for sub in config_entry.subentries.values()
if (uuid := sub.data.get(CONF_HA_ENTITY_UUID)) is not None
}
errors = _validate_mapping_input(ha_entity_id, current_mappings, ent_reg)
if not errors and ha_entity_id:
# Get entity registry entry
entity_entry = ent_reg.async_get(ha_entity_id)
if entity_entry:
energyid_key = ha_entity_id.split(".", 1)[-1]
subentry_data = {
CONF_HA_ENTITY_UUID: entity_entry.id, # Store UUID only
CONF_ENERGYID_KEY: energyid_key,
}
title = f"{ha_entity_id.split('.', 1)[-1]} connection to {NAME}"
_LOGGER.debug(
"Creating subentry with title='%s', data=%s",
title,
subentry_data,
)
_LOGGER.debug("Parent config entry ID: %s", config_entry.entry_id)
_LOGGER.debug(
"Creating subentry with parent: %s", self._get_entry().entry_id
)
return self.async_create_entry(title=title, data=subentry_data)
errors["base"] = "entity_not_found"
suggested_entities = _get_suggested_entities(self.hass)
data_schema = vol.Schema(
{
vol.Required("ha_entity_id"): EntitySelector(
EntitySelectorConfig(include_entities=suggested_entities)
),
}
)
return self.async_show_form(
step_id="user",
data_schema=data_schema,
errors=errors,
description_placeholders={"integration_name": NAME},
)
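
In the subentry flow above, the EnergyID metric key is simply the object_id of the selected entity (everything after the first dot of the entity_id), and the same value is reused in the subentry title. A minimal sketch with an assumed entity_id:

# Minimal sketch of the key derivation used above; the entity_id is only an
# example, not one the integration requires.
ha_entity_id = "sensor.living_room_energy"
energyid_key = ha_entity_id.split(".", 1)[-1]
title = f"{energyid_key} connection to EnergyID"
print(energyid_key)  # living_room_energy
print(title)         # living_room_energy connection to EnergyID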

View File

@@ -1,12 +0,0 @@
{
"domain": "energyid",
"name": "EnergyID",
"codeowners": ["@JrtPec", "@Molier"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/energyid",
"integration_type": "service",
"iot_class": "cloud_push",
"loggers": ["energyid_webhooks"],
"quality_scale": "silver",
"requirements": ["energyid-webhooks==0.0.14"]
}

View File

@@ -1,137 +0,0 @@
rules:
# Bronze
action-setup:
status: exempt
comment: The integration does not expose any custom service actions.
appropriate-polling:
status: exempt
comment: The integration uses a push-based mechanism with a background sync task, not polling.
brands:
status: done
common-modules:
status: done
config-flow-test-coverage:
status: done
config-flow:
status: done
dependency-transparency:
status: done
docs-actions:
status: exempt
comment: The integration does not expose any custom service actions.
docs-high-level-description:
status: done
docs-installation-instructions:
status: done
docs-removal-instructions:
status: done
entity-event-setup:
status: exempt
comment: This integration does not create its own entities.
entity-unique-id:
status: exempt
comment: This integration does not create its own entities.
has-entity-name:
status: exempt
comment: This integration does not create its own entities.
runtime-data:
status: done
test-before-configure:
status: done
test-before-setup:
status: done
unique-config-entry:
status: done
# Silver
action-exceptions:
status: exempt
comment: The integration does not expose any custom service actions.
config-entry-unloading:
status: done
docs-configuration-parameters:
status: done
docs-installation-parameters:
status: done
entity-unavailable:
status: exempt
comment: This integration does not create its own entities.
integration-owner:
status: done
log-when-unavailable:
status: done
comment: The integration logs a single message when the EnergyID service is unavailable.
parallel-updates:
status: exempt
comment: This integration does not create its own entities.
reauthentication-flow:
status: done
test-coverage:
status: done
# Gold
devices:
status: exempt
comment: The integration does not create any entities, nor does it create devices.
diagnostics:
status: todo
comment: Diagnostics will be added in a follow-up PR to help with debugging.
discovery:
status: exempt
comment: Configuration requires manual entry of provisioning credentials.
discovery-update-info:
status: exempt
comment: No discovery mechanism is used.
docs-data-update:
status: done
docs-examples:
status: done
docs-known-limitations:
status: done
docs-supported-devices:
status: exempt
comment: This is a service integration not tied to specific device models.
docs-supported-functions:
status: done
docs-troubleshooting:
status: done
docs-use-cases:
status: done
dynamic-devices:
status: exempt
comment: The integration creates a single device entry for the service connection.
entity-category:
status: exempt
comment: This integration does not create its own entities.
entity-device-class:
status: exempt
comment: This integration does not create its own entities.
entity-disabled-by-default:
status: exempt
comment: This integration does not create its own entities.
entity-translations:
status: exempt
comment: This integration does not create its own entities.
exception-translations:
status: done
icon-translations:
status: exempt
comment: This integration does not create its own entities.
reconfiguration-flow:
status: todo
comment: Reconfiguration will be added in a follow-up PR to allow updating the device name.
repair-issues:
status: exempt
comment: Authentication issues are handled via the reauthentication flow.
stale-devices:
status: exempt
comment: Creates a single service device entry tied to the config entry.
# Platinum
async-dependency:
status: done
inject-websession:
status: done
strict-typing:
status: todo
comment: Full strict typing compliance will be addressed in a future update.

View File

@@ -1,71 +0,0 @@
{
"config": {
"abort": {
"already_configured": "This device is already configured.",
"reauth_successful": "Reauthentication successful."
},
"create_entry": {
"add_sensor_mapping_hint": "You can now add mappings from any sensor in Home Assistant to {integration_name} using the '+ add sensor mapping' button."
},
"error": {
"cannot_connect": "Failed to connect to {integration_name} API.",
"claim_failed_or_timed_out": "Claiming the device failed or the code expired.",
"invalid_auth": "Invalid provisioning key or secret.",
"unknown_auth_error": "Unexpected error occurred during authentication."
},
"step": {
"auth_and_claim": {
"description": "This Home Assistant connection needs to be claimed in your {integration_name} portal before it can send data.\n\n1. Go to: {claim_url}\n2. Enter code: **{claim_code}**\n3. (Code expires: {valid_until})\n\nAfter successfully claiming the device in {integration_name}, select **Submit** below to continue.",
"title": "Claim device in {integration_name}"
},
"reauth_confirm": {
"data": {
"provisioning_key": "[%key:component::energyid::config::step::user::data::provisioning_key%]",
"provisioning_secret": "[%key:component::energyid::config::step::user::data::provisioning_secret%]"
},
"data_description": {
"provisioning_key": "[%key:component::energyid::config::step::user::data_description::provisioning_key%]",
"provisioning_secret": "[%key:component::energyid::config::step::user::data_description::provisioning_secret%]"
},
"description": "Please re-enter your {integration_name} provisioning key and secret to restore the connection.\n\nMore info: {docs_url}",
"title": "Reauthenticate {integration_name}"
},
"user": {
"data": {
"provisioning_key": "Provisioning key",
"provisioning_secret": "Provisioning secret"
},
"data_description": {
"provisioning_key": "Your unique key for provisioning.",
"provisioning_secret": "Your secret associated with the provisioning key."
},
"description": "Enter your {integration_name} webhook provisioning key and secret. Find these in your {integration_name} integration setup under provisioning credentials.\n\nMore info: {docs_url}",
"title": "Connect to {integration_name}"
}
}
},
"config_subentries": {
"sensor_mapping": {
"entry_type": "service",
"error": {
"entity_already_mapped": "This Home Assistant entity is already mapped.",
"entity_required": "You must select a sensor entity."
},
"initiate_flow": {
"user": "Add sensor mapping"
},
"step": {
"user": {
"data": {
"ha_entity_id": "Home Assistant sensor"
},
"data_description": {
"ha_entity_id": "Select the sensor from Home Assistant to send to {integration_name}."
},
"description": "Select a Home Assistant sensor to send to {integration_name}. The sensor name will be used as the {integration_name} metric key.",
"title": "Add sensor mapping"
}
}
}
}
}

View File

@@ -25,7 +25,6 @@ from .domain_data import DomainData
from .encryption_key_storage import async_get_encryption_key_storage
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData
from .manager import DEVICE_CONFLICT_ISSUE_FORMAT, ESPHomeManager, cleanup_instance
from .websocket_api import async_setup as async_setup_websocket_api
_LOGGER = logging.getLogger(__name__)
@@ -39,7 +38,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
ffmpeg_proxy.async_setup(hass)
await assist_satellite.async_setup(hass)
await dashboard.async_setup(hass)
async_setup_websocket_api(hass)
return True

View File

@@ -90,7 +90,7 @@ def async_static_info_updated(
# Create new entity if it doesn't exist
if not old_info:
entity = entity_type(entry_data, info, state_type)
entity = entity_type(entry_data, platform.domain, info, state_type)
add_entities.append(entity)
continue
@@ -112,7 +112,7 @@ def async_static_info_updated(
old_info.device_id,
info.device_id,
)
entity = entity_type(entry_data, info, state_type)
entity = entity_type(entry_data, platform.domain, info, state_type)
add_entities.append(entity)
continue
@@ -162,7 +162,7 @@ def async_static_info_updated(
entry_data.async_signal_entity_removal(info_type, old_info.device_id, info.key)
# Create new entity with the new device_id
add_entities.append(entity_type(entry_data, info, state_type))
add_entities.append(entity_type(entry_data, platform.domain, info, state_type))
# Anything still in current_infos is now gone
if current_infos:
@@ -329,6 +329,7 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):
def __init__(
self,
entry_data: RuntimeEntryData,
domain: str,
entity_info: EntityInfo,
state_type: type[_StateT],
) -> None:
@@ -342,6 +343,7 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):
self._state_type = state_type
self._on_static_info_update(entity_info)
device_name = device_info.name
# Determine the device connection based on whether this entity belongs to a sub device
if entity_info.device_id:
# Entity belongs to a sub device
@@ -350,12 +352,27 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):
(DOMAIN, f"{device_info.mac_address}_{entity_info.device_id}")
}
)
# Use the pre-computed device_id_to_name mapping for O(1) lookup
device_name = entry_data.device_id_to_name.get(
entity_info.device_id, device_info.name
)
else:
# Entity belongs to the main device
self._attr_device_info = DeviceInfo(
connections={(dr.CONNECTION_NETWORK_MAC, device_info.mac_address)}
)
if entity_info.name:
self.entity_id = f"{domain}.{device_name}_{entity_info.name}"
else:
# https://github.com/home-assistant/core/issues/132532
# If the name is not set, ESPHome will use the sanitized friendly name
# as the name; however, we want to use the original object_id as the
# entity_id before it is sanitized, since the sanitizer is not UTF-8
# aware. In that case it is always going to be an empty string, so we
# drop the object_id.
self.entity_id = f"{domain}.{device_name}"
async def async_added_to_hass(self) -> None:
"""Register callbacks."""
entry_data = self._entry_data
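
The hunks above thread the platform domain into EsphomeEntity so each entity can suggest its own entity_id from the (sub-)device name, falling back to the device name alone when the ESPHome entity has no name. A minimal sketch of the two naming cases (the device and entity names are made up):

# Minimal sketch of the suggested-entity_id logic added above; not the
# integration's code, just the two cases it produces.
def suggested_entity_id(domain: str, device_name: str, entity_name: str) -> str:
    if entity_name:
        # Named entity: <domain>.<device_name>_<entity_name>
        return f"{domain}.{device_name}_{entity_name}"
    # Unnamed entity: drop the object_id and use the device name alone
    # (see https://github.com/home-assistant/core/issues/132532)
    return f"{domain}.{device_name}"


print(suggested_entity_id("sensor", "garage_door", "temperature"))  # sensor.garage_door_temperature
print(suggested_entity_id("sensor", "garage_door", ""))             # sensor.garage_door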

View File

@@ -17,7 +17,7 @@
"mqtt": ["esphome/discover/#"],
"quality_scale": "platinum",
"requirements": [
"aioesphomeapi==42.8.0",
"aioesphomeapi==42.7.0",
"esphome-dashboard-api==1.3.0",
"bleak-esphome==3.4.0"
],

Some files were not shown because too many files have changed in this diff.