mirror of
https://github.com/home-assistant/core.git
synced 2025-11-30 04:58:01 +00:00
Compare commits
126 Commits
labs_helpe
...
add-app-pa
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
a209b98d1c | ||
|
|
e10c1ebcf6 | ||
|
|
0174bad182 | ||
|
|
d5be623684 | ||
|
|
d006b044c8 | ||
|
|
fdd9571623 | ||
|
|
4f4c5152b9 | ||
|
|
b031a082cd | ||
|
|
a1132195fd | ||
|
|
708b3dc8b2 | ||
|
|
8ae0216135 | ||
|
|
1472281cd5 | ||
|
|
ceaa71d198 | ||
|
|
7f0d0c555a | ||
|
|
3b94b2491a | ||
|
|
8c8708d5bc | ||
|
|
ca35102138 | ||
|
|
1a1b50ef1a | ||
|
|
5a4d51e57a | ||
|
|
9e1bc637e2 | ||
|
|
ab879c07ca | ||
|
|
488c97531e | ||
|
|
3b52c5df79 | ||
|
|
7f4b56104d | ||
|
|
ab8135ba1a | ||
|
|
a88599bc09 | ||
|
|
45034279c8 | ||
|
|
9f3dae6254 | ||
|
|
ef36d7b1e5 | ||
|
|
e5346ba017 | ||
|
|
68d41d2a48 | ||
|
|
00a882c20a | ||
|
|
44a6772947 | ||
|
|
f874ba1355 | ||
|
|
4fc125c49a | ||
|
|
8c59196e19 | ||
|
|
326f7f0559 | ||
|
|
11afda8c22 | ||
|
|
f1ee0e4ac9 | ||
|
|
5f522e5afa | ||
|
|
4f6624d0aa | ||
|
|
70990645a7 | ||
|
|
2f7d74ff62 | ||
|
|
885667832b | ||
|
|
4646929987 | ||
|
|
010aea952c | ||
|
|
563678dc47 | ||
|
|
a48f01f213 | ||
|
|
08b758b0d2 | ||
|
|
4306fbea52 | ||
|
|
6f4c479f8f | ||
|
|
1d9c06264e | ||
|
|
d045ecaf13 | ||
|
|
f7c41e694c | ||
|
|
9ee7ed5cdb | ||
|
|
83c4e2abc9 | ||
|
|
a7dbf551a3 | ||
|
|
0b2bb9f6bf | ||
|
|
0769163b67 | ||
|
|
2bb51e1146 | ||
|
|
d2248d282c | ||
|
|
8fe79a88ca | ||
|
|
7a328539b2 | ||
|
|
ec69efee4d | ||
|
|
dbcde549d4 | ||
|
|
988355e138 | ||
|
|
7711eac607 | ||
|
|
32fe53cceb | ||
|
|
3a65d3c0dc | ||
|
|
7fe26223ac | ||
|
|
7e8496afb2 | ||
|
|
2ec5190243 | ||
|
|
a706db8fdb | ||
|
|
a00923c48b | ||
|
|
7480d59f0f | ||
|
|
4c8d9ed401 | ||
|
|
eef10c59db | ||
|
|
a1a1f8dd77 | ||
|
|
c75a5c5151 | ||
|
|
cdaaa2bd8f | ||
|
|
bd84dac8fb | ||
|
|
42cbeca5b0 | ||
|
|
ad0a498d10 | ||
|
|
973405822b | ||
|
|
b883d2f519 | ||
|
|
4654d6de87 | ||
|
|
990c8cd4e6 | ||
|
|
f8c76f42e3 | ||
|
|
21d914c8ca | ||
|
|
ec77add1a6 | ||
|
|
ef3b7dfd1d | ||
|
|
51241d963d | ||
|
|
7c48e6e046 | ||
|
|
38d8da4279 | ||
|
|
3396a72fa8 | ||
|
|
2d26ab390e | ||
|
|
1bf5bc9323 | ||
|
|
87ea96a3e0 | ||
|
|
e3cf65510b | ||
|
|
f69fce68d6 | ||
|
|
f758cfa82f | ||
|
|
9c7a928b29 | ||
|
|
405a9948a2 | ||
|
|
0e3bab3ce4 | ||
|
|
4900d25ac8 | ||
|
|
ea10cdb4b0 | ||
|
|
6baf77d256 | ||
|
|
13bc0ebed8 | ||
|
|
611af9c832 | ||
|
|
c2b7a63dd9 | ||
|
|
550716a753 | ||
|
|
56a71e6798 | ||
|
|
80ec51c56b | ||
|
|
ea651c4a22 | ||
|
|
ff40ce419e | ||
|
|
d95308719c | ||
|
|
f4fb95ee43 | ||
|
|
14d95cc86b | ||
|
|
4257435975 | ||
|
|
a6aab088fb | ||
|
|
655a63c104 | ||
|
|
a2ade413c2 | ||
|
|
10299b2ef4 | ||
|
|
26444d8d34 | ||
|
|
554c122a37 | ||
|
|
1c0dd02a7c |
158
.github/workflows/builder.yml
vendored
158
.github/workflows/builder.yml
vendored
@@ -14,7 +14,9 @@ env:
|
||||
PIP_TIMEOUT: 60
|
||||
UV_HTTP_TIMEOUT: 60
|
||||
UV_SYSTEM_PYTHON: "true"
|
||||
BASE_IMAGE_VERSION: "2025.11.0"
|
||||
# Base image version from https://github.com/home-assistant/docker
|
||||
BASE_IMAGE_VERSION: "2025.11.3"
|
||||
ARCHITECTURES: '["amd64", "aarch64"]'
|
||||
|
||||
jobs:
|
||||
init:
|
||||
@@ -25,6 +27,7 @@ jobs:
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
channel: ${{ steps.version.outputs.channel }}
|
||||
publish: ${{ steps.version.outputs.publish }}
|
||||
architectures: ${{ env.ARCHITECTURES }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
@@ -85,7 +88,7 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
arch: ["amd64", "aarch64"]
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
include:
|
||||
- arch: amd64
|
||||
os: ubuntu-latest
|
||||
@@ -187,7 +190,8 @@ jobs:
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Install Cosign
|
||||
- &install_cosign
|
||||
name: Install Cosign
|
||||
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
|
||||
with:
|
||||
cosign-release: "v2.5.3"
|
||||
@@ -291,7 +295,7 @@ jobs:
|
||||
|
||||
# home-assistant/builder doesn't support sha pinning
|
||||
- name: Build base image
|
||||
uses: home-assistant/builder@2025.09.0
|
||||
uses: home-assistant/builder@2025.11.0
|
||||
with:
|
||||
args: |
|
||||
$BUILD_ARGS \
|
||||
@@ -350,13 +354,7 @@ jobs:
|
||||
matrix:
|
||||
registry: ["ghcr.io/home-assistant", "docker.io/homeassistant"]
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Install Cosign
|
||||
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
|
||||
with:
|
||||
cosign-release: "v2.2.3"
|
||||
- *install_cosign
|
||||
|
||||
- name: Login to DockerHub
|
||||
if: matrix.registry == 'docker.io/homeassistant'
|
||||
@@ -366,88 +364,94 @@ jobs:
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: matrix.registry == 'ghcr.io/home-assistant'
|
||||
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build Meta Image
|
||||
- name: Verify architecture image signatures
|
||||
shell: bash
|
||||
run: |
|
||||
export DOCKER_CLI_EXPERIMENTAL=enabled
|
||||
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
|
||||
for arch in $ARCHS; do
|
||||
echo "Verifying ${arch} image signature..."
|
||||
cosign verify \
|
||||
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
|
||||
--certificate-identity-regexp https://github.com/home-assistant/core/.* \
|
||||
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
|
||||
done
|
||||
echo "✓ All images verified successfully"
|
||||
|
||||
function create_manifest() {
|
||||
local tag_l=${1}
|
||||
local tag_r=${2}
|
||||
local registry=${{ matrix.registry }}
|
||||
# Generate all Docker tags based on version string
|
||||
# Version format: YYYY.MM.PATCH, YYYY.MM.PATCHbN (beta), or YYYY.MM.PATCH.devYYYYMMDDHHMM (dev)
|
||||
# Examples:
|
||||
# 2025.12.1 (stable) -> tags: 2025.12.1, 2025.12, stable, latest, beta, rc
|
||||
# 2025.12.0b3 (beta) -> tags: 2025.12.0b3, beta, rc
|
||||
# 2025.12.0.dev202511250240 -> tags: 2025.12.0.dev202511250240, dev
|
||||
- name: Generate Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
|
||||
with:
|
||||
images: ${{ matrix.registry }}/home-assistant
|
||||
sep-tags: ","
|
||||
tags: |
|
||||
type=raw,value=${{ needs.init.outputs.version }},priority=9999
|
||||
type=raw,value=dev,enable=${{ contains(needs.init.outputs.version, 'd') }}
|
||||
type=raw,value=beta,enable=${{ !contains(needs.init.outputs.version, 'd') }}
|
||||
type=raw,value=rc,enable=${{ !contains(needs.init.outputs.version, 'd') }}
|
||||
type=raw,value=stable,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
|
||||
type=raw,value=latest,enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
|
||||
type=semver,pattern={{major}}.{{minor}},value=${{ needs.init.outputs.version }},enable=${{ !contains(needs.init.outputs.version, 'd') && !contains(needs.init.outputs.version, 'b') }}
|
||||
|
||||
docker manifest create "${registry}/home-assistant:${tag_l}" \
|
||||
"${registry}/amd64-homeassistant:${tag_r}" \
|
||||
"${registry}/aarch64-homeassistant:${tag_r}"
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.7.1
|
||||
|
||||
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
|
||||
"${registry}/amd64-homeassistant:${tag_r}" \
|
||||
--os linux --arch amd64
|
||||
- name: Copy architecture images to DockerHub
|
||||
if: matrix.registry == 'docker.io/homeassistant'
|
||||
shell: bash
|
||||
run: |
|
||||
# Use imagetools to copy image blobs directly between registries
|
||||
# This preserves provenance/attestations and seems to be much faster than pull/push
|
||||
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
|
||||
for arch in $ARCHS; do
|
||||
echo "Copying ${arch} image to DockerHub..."
|
||||
docker buildx imagetools create \
|
||||
--tag "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}" \
|
||||
"ghcr.io/home-assistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
|
||||
cosign sign --yes "docker.io/homeassistant/${arch}-homeassistant:${{ needs.init.outputs.version }}"
|
||||
done
|
||||
|
||||
docker manifest annotate "${registry}/home-assistant:${tag_l}" \
|
||||
"${registry}/aarch64-homeassistant:${tag_r}" \
|
||||
--os linux --arch arm64 --variant=v8
|
||||
- name: Create and push multi-arch manifests
|
||||
shell: bash
|
||||
run: |
|
||||
# Build list of architecture images dynamically
|
||||
ARCHS=$(echo '${{ needs.init.outputs.architectures }}' | jq -r '.[]')
|
||||
ARCH_IMAGES=()
|
||||
for arch in $ARCHS; do
|
||||
ARCH_IMAGES+=("${{ matrix.registry }}/${arch}-homeassistant:${{ needs.init.outputs.version }}")
|
||||
done
|
||||
|
||||
docker manifest push --purge "${registry}/home-assistant:${tag_l}"
|
||||
cosign sign --yes "${registry}/home-assistant:${tag_l}"
|
||||
}
|
||||
# Build list of all tags for single manifest creation
|
||||
# Note: Using sep-tags=',' in metadata-action for easier parsing
|
||||
TAG_ARGS=()
|
||||
IFS=',' read -ra TAGS <<< "${{ steps.meta.outputs.tags }}"
|
||||
for tag in "${TAGS[@]}"; do
|
||||
TAG_ARGS+=("--tag" "${tag}")
|
||||
done
|
||||
|
||||
function validate_image() {
|
||||
local image=${1}
|
||||
if ! cosign verify --certificate-oidc-issuer https://token.actions.githubusercontent.com --certificate-identity-regexp https://github.com/home-assistant/core/.* "${image}"; then
|
||||
echo "Invalid signature!"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
# Create manifest with ALL tags in a single operation (much faster!)
|
||||
echo "Creating multi-arch manifest with tags: ${TAGS[*]}"
|
||||
docker buildx imagetools create "${TAG_ARGS[@]}" "${ARCH_IMAGES[@]}"
|
||||
|
||||
function push_dockerhub() {
|
||||
local image=${1}
|
||||
local tag=${2}
|
||||
# Sign each tag separately (signing requires individual tag names)
|
||||
echo "Signing all tags..."
|
||||
for tag in "${TAGS[@]}"; do
|
||||
echo "Signing ${tag}"
|
||||
cosign sign --yes "${tag}"
|
||||
done
|
||||
|
||||
docker tag "ghcr.io/home-assistant/${image}:${tag}" "docker.io/homeassistant/${image}:${tag}"
|
||||
docker push "docker.io/homeassistant/${image}:${tag}"
|
||||
cosign sign --yes "docker.io/homeassistant/${image}:${tag}"
|
||||
}
|
||||
|
||||
# Pull images from github container registry and verify signature
|
||||
docker pull "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
|
||||
docker pull "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
|
||||
|
||||
validate_image "ghcr.io/home-assistant/amd64-homeassistant:${{ needs.init.outputs.version }}"
|
||||
validate_image "ghcr.io/home-assistant/aarch64-homeassistant:${{ needs.init.outputs.version }}"
|
||||
|
||||
if [[ "${{ matrix.registry }}" == "docker.io/homeassistant" ]]; then
|
||||
# Upload images to dockerhub
|
||||
push_dockerhub "amd64-homeassistant" "${{ needs.init.outputs.version }}"
|
||||
push_dockerhub "aarch64-homeassistant" "${{ needs.init.outputs.version }}"
|
||||
fi
|
||||
|
||||
# Create version tag
|
||||
create_manifest "${{ needs.init.outputs.version }}" "${{ needs.init.outputs.version }}"
|
||||
|
||||
# Create general tags
|
||||
if [[ "${{ needs.init.outputs.version }}" =~ d ]]; then
|
||||
create_manifest "dev" "${{ needs.init.outputs.version }}"
|
||||
elif [[ "${{ needs.init.outputs.version }}" =~ b ]]; then
|
||||
create_manifest "beta" "${{ needs.init.outputs.version }}"
|
||||
create_manifest "rc" "${{ needs.init.outputs.version }}"
|
||||
else
|
||||
create_manifest "stable" "${{ needs.init.outputs.version }}"
|
||||
create_manifest "latest" "${{ needs.init.outputs.version }}"
|
||||
create_manifest "beta" "${{ needs.init.outputs.version }}"
|
||||
create_manifest "rc" "${{ needs.init.outputs.version }}"
|
||||
|
||||
# Create series version tag (e.g. 2021.6)
|
||||
v="${{ needs.init.outputs.version }}"
|
||||
create_manifest "${v%.*}" "${{ needs.init.outputs.version }}"
|
||||
fi
|
||||
echo "All manifests created and signed successfully"
|
||||
|
||||
build_python:
|
||||
name: Build PyPi package
|
||||
|
||||
2
.github/workflows/ci.yaml
vendored
2
.github/workflows/ci.yaml
vendored
@@ -40,7 +40,7 @@ env:
|
||||
CACHE_VERSION: 2
|
||||
UV_CACHE_VERSION: 1
|
||||
MYPY_CACHE_VERSION: 1
|
||||
HA_SHORT_VERSION: "2025.12"
|
||||
HA_SHORT_VERSION: "2026.1"
|
||||
DEFAULT_PYTHON: "3.13"
|
||||
ALL_PYTHON_VERSIONS: "['3.13', '3.14']"
|
||||
# 10.3 is the oldest supported version
|
||||
|
||||
@@ -231,7 +231,7 @@ jobs:
|
||||
- name: Detect duplicates using AI
|
||||
id: ai_detection
|
||||
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
|
||||
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
|
||||
uses: actions/ai-inference@02c6cc30ae592ce65ee356387748dfc2fd5f7993 # v2.0.3
|
||||
with:
|
||||
model: openai/gpt-4o
|
||||
system-prompt: |
|
||||
|
||||
@@ -57,7 +57,7 @@ jobs:
|
||||
- name: Detect language using AI
|
||||
id: ai_language_detection
|
||||
if: steps.detect_language.outputs.should_continue == 'true'
|
||||
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
|
||||
uses: actions/ai-inference@02c6cc30ae592ce65ee356387748dfc2fd5f7993 # v2.0.3
|
||||
with:
|
||||
model: openai/gpt-4o-mini
|
||||
system-prompt: |
|
||||
|
||||
@@ -187,6 +187,7 @@ homeassistant.components.elkm1.*
|
||||
homeassistant.components.emulated_hue.*
|
||||
homeassistant.components.energenie_power_sockets.*
|
||||
homeassistant.components.energy.*
|
||||
homeassistant.components.energyid.*
|
||||
homeassistant.components.energyzero.*
|
||||
homeassistant.components.enigma2.*
|
||||
homeassistant.components.enphase_envoy.*
|
||||
|
||||
2
CODEOWNERS
generated
2
CODEOWNERS
generated
@@ -452,6 +452,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/energenie_power_sockets/ @gnumpi
|
||||
/homeassistant/components/energy/ @home-assistant/core
|
||||
/tests/components/energy/ @home-assistant/core
|
||||
/homeassistant/components/energyid/ @JrtPec @Molier
|
||||
/tests/components/energyid/ @JrtPec @Molier
|
||||
/homeassistant/components/energyzero/ @klaasnicolaas
|
||||
/tests/components/energyzero/ @klaasnicolaas
|
||||
/homeassistant/components/enigma2/ @autinerd
|
||||
|
||||
@@ -35,25 +35,22 @@ COPY --from=ghcr.io/astral-sh/uv:latest /uv /usr/local/bin/uv
|
||||
|
||||
USER vscode
|
||||
|
||||
COPY .python-version ./
|
||||
RUN uv python install
|
||||
|
||||
ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv"
|
||||
RUN uv venv $VIRTUAL_ENV
|
||||
RUN --mount=type=bind,source=.python-version,target=.python-version \
|
||||
uv python install \
|
||||
&& uv venv $VIRTUAL_ENV
|
||||
ENV PATH="$VIRTUAL_ENV/bin:$PATH"
|
||||
|
||||
WORKDIR /tmp
|
||||
|
||||
# Setup hass-release
|
||||
RUN git clone --depth 1 https://github.com/home-assistant/hass-release ~/hass-release \
|
||||
&& uv pip install -e ~/hass-release/
|
||||
|
||||
# Install Python dependencies from requirements
|
||||
COPY requirements.txt ./
|
||||
COPY homeassistant/package_constraints.txt homeassistant/package_constraints.txt
|
||||
RUN uv pip install -r requirements.txt
|
||||
COPY requirements_test.txt requirements_test_pre_commit.txt ./
|
||||
RUN uv pip install -r requirements_test.txt
|
||||
RUN --mount=type=bind,source=requirements.txt,target=requirements.txt \
|
||||
--mount=type=bind,source=homeassistant/package_constraints.txt,target=homeassistant/package_constraints.txt \
|
||||
--mount=type=bind,source=requirements_test.txt,target=requirements_test.txt \
|
||||
--mount=type=bind,source=requirements_test_pre_commit.txt,target=requirements_test_pre_commit.txt \
|
||||
uv pip install -r requirements.txt -r requirements_test.txt
|
||||
|
||||
WORKDIR /workspaces
|
||||
|
||||
|
||||
@@ -1000,7 +1000,7 @@ class _WatchPendingSetups:
|
||||
# We log every LOG_SLOW_STARTUP_INTERVAL until all integrations are done
|
||||
# once we take over LOG_SLOW_STARTUP_INTERVAL (60s) to start up
|
||||
_LOGGER.warning(
|
||||
"Waiting on integrations to complete setup: %s",
|
||||
"Waiting for integrations to complete setup: %s",
|
||||
self._setup_started,
|
||||
)
|
||||
|
||||
|
||||
@@ -21,7 +21,7 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
|
||||
from .const import _LOGGER, CONF_LOGIN_DATA, DOMAIN
|
||||
|
||||
SCAN_INTERVAL = 30
|
||||
SCAN_INTERVAL = 300
|
||||
|
||||
type AmazonConfigEntry = ConfigEntry[AmazonDevicesCoordinator]
|
||||
|
||||
@@ -45,7 +45,7 @@ class AmazonDevicesCoordinator(DataUpdateCoordinator[dict[str, AmazonDevice]]):
|
||||
config_entry=entry,
|
||||
update_interval=timedelta(seconds=SCAN_INTERVAL),
|
||||
request_refresh_debouncer=Debouncer(
|
||||
hass, _LOGGER, cooldown=30, immediate=False
|
||||
hass, _LOGGER, cooldown=SCAN_INTERVAL, immediate=False
|
||||
),
|
||||
)
|
||||
self.api = AmazonEchoApi(
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==9.0.3"]
|
||||
"requirements": ["aioamazondevices==10.0.0"]
|
||||
}
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from aiohttp import CookieJar
|
||||
from pyanglianwater import AnglianWater
|
||||
from pyanglianwater.auth import MSOB2CAuth
|
||||
from pyanglianwater.exceptions import (
|
||||
@@ -18,7 +19,7 @@ from homeassistant.const import (
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
|
||||
from .const import CONF_ACCOUNT_NUMBER, DOMAIN
|
||||
from .coordinator import AnglianWaterConfigEntry, AnglianWaterUpdateCoordinator
|
||||
@@ -33,7 +34,10 @@ async def async_setup_entry(
|
||||
auth = MSOB2CAuth(
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=async_get_clientsession(hass),
|
||||
session=async_create_clientsession(
|
||||
hass,
|
||||
cookie_jar=CookieJar(quote_cookie=False),
|
||||
),
|
||||
refresh_token=entry.data[CONF_ACCESS_TOKEN],
|
||||
account_number=entry.data[CONF_ACCOUNT_NUMBER],
|
||||
)
|
||||
|
||||
@@ -18,17 +18,21 @@ _LOGGER = logging.getLogger(__name__)
|
||||
class AnglianWaterEntity(CoordinatorEntity[AnglianWaterUpdateCoordinator]):
|
||||
"""Defines a Anglian Water entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AnglianWaterUpdateCoordinator,
|
||||
smart_meter: SmartMeter,
|
||||
key: str,
|
||||
) -> None:
|
||||
"""Initialize Anglian Water entity."""
|
||||
super().__init__(coordinator)
|
||||
self.smart_meter = smart_meter
|
||||
self._attr_unique_id = f"{smart_meter.serial_number}_{key}"
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, smart_meter.serial_number)},
|
||||
name="Smart Water Meter",
|
||||
name=smart_meter.serial_number,
|
||||
manufacturer="Anglian Water",
|
||||
serial_number=smart_meter.serial_number,
|
||||
)
|
||||
|
||||
@@ -108,9 +108,8 @@ class AnglianWaterSensorEntity(AnglianWaterEntity, SensorEntity):
|
||||
description: AnglianWaterSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize Anglian Water sensor."""
|
||||
super().__init__(coordinator, smart_meter)
|
||||
super().__init__(coordinator, smart_meter, description.key)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{smart_meter.serial_number}_{description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
|
||||
@@ -17,7 +17,7 @@ from homeassistant.helpers import (
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import CONF_CHAT_MODEL, DEFAULT, DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
|
||||
from .const import DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
|
||||
|
||||
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
@@ -37,14 +37,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
|
||||
partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
|
||||
)
|
||||
try:
|
||||
# Use model from first conversation subentry for validation
|
||||
subentries = list(entry.subentries.values())
|
||||
if subentries:
|
||||
model_id = subentries[0].data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
|
||||
else:
|
||||
model_id = DEFAULT[CONF_CHAT_MODEL]
|
||||
model = await client.models.retrieve(model_id=model_id, timeout=10.0)
|
||||
LOGGER.debug("Anthropic model: %s", model.display_name)
|
||||
await client.models.list(timeout=10.0)
|
||||
except anthropic.AuthenticationError as err:
|
||||
LOGGER.error("Invalid API key: %s", err)
|
||||
return False
|
||||
|
||||
@@ -421,6 +421,8 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
)
|
||||
if short_form.search(model_alias):
|
||||
model_alias += "-0"
|
||||
if model_alias.endswith(("haiku", "opus", "sonnet")):
|
||||
model_alias += "-latest"
|
||||
model_options.append(
|
||||
SelectOptionDict(
|
||||
label=model_info.display_name,
|
||||
|
||||
@@ -583,7 +583,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
identifiers={(DOMAIN, subentry.subentry_id)},
|
||||
name=subentry.title,
|
||||
manufacturer="Anthropic",
|
||||
model="Claude",
|
||||
model=subentry.data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL]),
|
||||
entry_type=dr.DeviceEntryType.SERVICE,
|
||||
)
|
||||
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/anthropic",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"requirements": ["anthropic==0.73.0"]
|
||||
"requirements": ["anthropic==0.75.0"]
|
||||
}
|
||||
|
||||
@@ -1123,63 +1123,6 @@ class PipelineRun:
|
||||
)
|
||||
|
||||
try:
|
||||
user_input = conversation.ConversationInput(
|
||||
text=intent_input,
|
||||
context=self.context,
|
||||
conversation_id=conversation_id,
|
||||
device_id=self._device_id,
|
||||
satellite_id=self._satellite_id,
|
||||
language=input_language,
|
||||
agent_id=self.intent_agent.id,
|
||||
extra_system_prompt=conversation_extra_system_prompt,
|
||||
)
|
||||
|
||||
agent_id = self.intent_agent.id
|
||||
processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
|
||||
all_targets_in_satellite_area = False
|
||||
intent_response: intent.IntentResponse | None = None
|
||||
if not processed_locally and not self._intent_agent_only:
|
||||
# Sentence triggers override conversation agent
|
||||
if (
|
||||
trigger_response_text
|
||||
:= await conversation.async_handle_sentence_triggers(
|
||||
self.hass, user_input
|
||||
)
|
||||
) is not None:
|
||||
# Sentence trigger matched
|
||||
agent_id = "sentence_trigger"
|
||||
processed_locally = True
|
||||
intent_response = intent.IntentResponse(
|
||||
self.pipeline.conversation_language
|
||||
)
|
||||
intent_response.async_set_speech(trigger_response_text)
|
||||
|
||||
intent_filter: Callable[[RecognizeResult], bool] | None = None
|
||||
# If the LLM has API access, we filter out some sentences that are
|
||||
# interfering with LLM operation.
|
||||
if (
|
||||
intent_agent_state := self.hass.states.get(self.intent_agent.id)
|
||||
) and intent_agent_state.attributes.get(
|
||||
ATTR_SUPPORTED_FEATURES, 0
|
||||
) & conversation.ConversationEntityFeature.CONTROL:
|
||||
intent_filter = _async_local_fallback_intent_filter
|
||||
|
||||
# Try local intents
|
||||
if (
|
||||
intent_response is None
|
||||
and self.pipeline.prefer_local_intents
|
||||
and (
|
||||
intent_response := await conversation.async_handle_intents(
|
||||
self.hass,
|
||||
user_input,
|
||||
intent_filter=intent_filter,
|
||||
)
|
||||
)
|
||||
):
|
||||
# Local intent matched
|
||||
agent_id = conversation.HOME_ASSISTANT_AGENT
|
||||
processed_locally = True
|
||||
|
||||
if self.tts_stream and self.tts_stream.supports_streaming_input:
|
||||
tts_input_stream: asyncio.Queue[str | None] | None = asyncio.Queue()
|
||||
else:
|
||||
@@ -1265,6 +1208,17 @@ class PipelineRun:
|
||||
assert self.tts_stream is not None
|
||||
self.tts_stream.async_set_message_stream(tts_input_stream_generator())
|
||||
|
||||
user_input = conversation.ConversationInput(
|
||||
text=intent_input,
|
||||
context=self.context,
|
||||
conversation_id=conversation_id,
|
||||
device_id=self._device_id,
|
||||
satellite_id=self._satellite_id,
|
||||
language=input_language,
|
||||
agent_id=self.intent_agent.id,
|
||||
extra_system_prompt=conversation_extra_system_prompt,
|
||||
)
|
||||
|
||||
with (
|
||||
chat_session.async_get_chat_session(
|
||||
self.hass, user_input.conversation_id
|
||||
@@ -1276,6 +1230,53 @@ class PipelineRun:
|
||||
chat_log_delta_listener=chat_log_delta_listener,
|
||||
) as chat_log,
|
||||
):
|
||||
agent_id = self.intent_agent.id
|
||||
processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
|
||||
all_targets_in_satellite_area = False
|
||||
intent_response: intent.IntentResponse | None = None
|
||||
if not processed_locally and not self._intent_agent_only:
|
||||
# Sentence triggers override conversation agent
|
||||
if (
|
||||
trigger_response_text
|
||||
:= await conversation.async_handle_sentence_triggers(
|
||||
self.hass, user_input, chat_log
|
||||
)
|
||||
) is not None:
|
||||
# Sentence trigger matched
|
||||
agent_id = "sentence_trigger"
|
||||
processed_locally = True
|
||||
intent_response = intent.IntentResponse(
|
||||
self.pipeline.conversation_language
|
||||
)
|
||||
intent_response.async_set_speech(trigger_response_text)
|
||||
|
||||
intent_filter: Callable[[RecognizeResult], bool] | None = None
|
||||
# If the LLM has API access, we filter out some sentences that are
|
||||
# interfering with LLM operation.
|
||||
if (
|
||||
intent_agent_state := self.hass.states.get(self.intent_agent.id)
|
||||
) and intent_agent_state.attributes.get(
|
||||
ATTR_SUPPORTED_FEATURES, 0
|
||||
) & conversation.ConversationEntityFeature.CONTROL:
|
||||
intent_filter = _async_local_fallback_intent_filter
|
||||
|
||||
# Try local intents
|
||||
if (
|
||||
intent_response is None
|
||||
and self.pipeline.prefer_local_intents
|
||||
and (
|
||||
intent_response := await conversation.async_handle_intents(
|
||||
self.hass,
|
||||
user_input,
|
||||
chat_log,
|
||||
intent_filter=intent_filter,
|
||||
)
|
||||
)
|
||||
):
|
||||
# Local intent matched
|
||||
agent_id = conversation.HOME_ASSISTANT_AGENT
|
||||
processed_locally = True
|
||||
|
||||
# It was already handled, create response and add to chat history
|
||||
if intent_response is not None:
|
||||
speech: str = intent_response.speech.get("plain", {}).get(
|
||||
|
||||
@@ -12,8 +12,9 @@ from typing import Any, Protocol, cast
|
||||
from propcache.api import cached_property
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components import labs, websocket_api
|
||||
from homeassistant.components.blueprint import CONF_USE_BLUEPRINT
|
||||
from homeassistant.components.labs import async_listen as async_labs_listen
|
||||
from homeassistant.const import (
|
||||
ATTR_ENTITY_ID,
|
||||
ATTR_MODE,
|
||||
@@ -114,6 +115,51 @@ ATTR_SOURCE = "source"
|
||||
ATTR_VARIABLES = "variables"
|
||||
SERVICE_TRIGGER = "trigger"
|
||||
|
||||
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG = "new_triggers_conditions"
|
||||
|
||||
_EXPERIMENTAL_CONDITION_PLATFORMS = {
|
||||
"light",
|
||||
}
|
||||
|
||||
_EXPERIMENTAL_TRIGGER_PLATFORMS = {
|
||||
"alarm_control_panel",
|
||||
"assist_satellite",
|
||||
"climate",
|
||||
"cover",
|
||||
"fan",
|
||||
"lawn_mower",
|
||||
"light",
|
||||
"media_player",
|
||||
"text",
|
||||
"vacuum",
|
||||
}
|
||||
|
||||
|
||||
@callback
|
||||
def is_disabled_experimental_condition(hass: HomeAssistant, platform: str) -> bool:
|
||||
"""Check if the platform is a disabled experimental condition platform."""
|
||||
return (
|
||||
platform in _EXPERIMENTAL_CONDITION_PLATFORMS
|
||||
and not labs.async_is_preview_feature_enabled(
|
||||
hass,
|
||||
DOMAIN,
|
||||
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def is_disabled_experimental_trigger(hass: HomeAssistant, platform: str) -> bool:
|
||||
"""Check if the platform is a disabled experimental trigger platform."""
|
||||
return (
|
||||
platform in _EXPERIMENTAL_TRIGGER_PLATFORMS
|
||||
and not labs.async_is_preview_feature_enabled(
|
||||
hass,
|
||||
DOMAIN,
|
||||
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
class IfAction(Protocol):
|
||||
"""Define the format of if_action."""
|
||||
@@ -317,6 +363,20 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
schema=vol.Schema({vol.Optional(CONF_ID): str}),
|
||||
)
|
||||
|
||||
@callback
|
||||
def new_triggers_conditions_listener() -> None:
|
||||
"""Handle new_triggers_conditions flag change."""
|
||||
hass.async_create_task(
|
||||
reload_helper.execute_service(ServiceCall(hass, DOMAIN, SERVICE_RELOAD))
|
||||
)
|
||||
|
||||
async_labs_listen(
|
||||
hass,
|
||||
DOMAIN,
|
||||
NEW_TRIGGERS_CONDITIONS_FEATURE_FLAG,
|
||||
new_triggers_conditions_listener,
|
||||
)
|
||||
|
||||
websocket_api.async_register_command(hass, websocket_config)
|
||||
|
||||
return True
|
||||
|
||||
@@ -17,8 +17,12 @@ from homeassistant.components.media_player import (
|
||||
class BangOlufsenSource:
|
||||
"""Class used for associating device source ids with friendly names. May not include all sources."""
|
||||
|
||||
DEEZER: Final[Source] = Source(name="Deezer", id="deezer")
|
||||
LINE_IN: Final[Source] = Source(name="Line-In", id="lineIn")
|
||||
NET_RADIO: Final[Source] = Source(name="B&O Radio", id="netRadio")
|
||||
SPDIF: Final[Source] = Source(name="Optical", id="spdif")
|
||||
TIDAL: Final[Source] = Source(name="Tidal", id="tidal")
|
||||
UNKNOWN: Final[Source] = Source(name="Unknown Source", id="unknown")
|
||||
URI_STREAMER: Final[Source] = Source(name="Audio Streamer", id="uriStreamer")
|
||||
|
||||
|
||||
@@ -78,6 +82,16 @@ class BangOlufsenModel(StrEnum):
|
||||
BEOREMOTE_ONE = "Beoremote One"
|
||||
|
||||
|
||||
class BangOlufsenAttribute(StrEnum):
|
||||
"""Enum for extra_state_attribute keys."""
|
||||
|
||||
BEOLINK = "beolink"
|
||||
BEOLINK_PEERS = "peers"
|
||||
BEOLINK_SELF = "self"
|
||||
BEOLINK_LEADER = "leader"
|
||||
BEOLINK_LISTENERS = "listeners"
|
||||
|
||||
|
||||
# Physical "buttons" on devices
|
||||
class BangOlufsenButtons(StrEnum):
|
||||
"""Enum for device buttons."""
|
||||
|
||||
@@ -82,6 +82,7 @@ from .const import (
|
||||
FALLBACK_SOURCES,
|
||||
MANUFACTURER,
|
||||
VALID_MEDIA_TYPES,
|
||||
BangOlufsenAttribute,
|
||||
BangOlufsenMediaType,
|
||||
BangOlufsenSource,
|
||||
WebsocketNotification,
|
||||
@@ -224,7 +225,8 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
# Beolink compatible sources
|
||||
self._beolink_sources: dict[str, bool] = {}
|
||||
self._remote_leader: BeolinkLeader | None = None
|
||||
# Extra state attributes for showing Beolink: peer(s), listener(s), leader and self
|
||||
# Extra state attributes:
|
||||
# Beolink: peer(s), listener(s), leader and self
|
||||
self._beolink_attributes: dict[str, dict[str, dict[str, str]]] = {}
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
@@ -436,7 +438,10 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
await self._async_update_beolink()
|
||||
|
||||
async def _async_update_beolink(self) -> None:
|
||||
"""Update the current Beolink leader, listeners, peers and self."""
|
||||
"""Update the current Beolink leader, listeners, peers and self.
|
||||
|
||||
Updates Home Assistant state.
|
||||
"""
|
||||
|
||||
self._beolink_attributes = {}
|
||||
|
||||
@@ -445,18 +450,24 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
|
||||
# Add Beolink self
|
||||
self._beolink_attributes = {
|
||||
"beolink": {"self": {self.device_entry.name: self._beolink_jid}}
|
||||
BangOlufsenAttribute.BEOLINK: {
|
||||
BangOlufsenAttribute.BEOLINK_SELF: {
|
||||
self.device_entry.name: self._beolink_jid
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
# Add Beolink peers
|
||||
peers = await self._client.get_beolink_peers()
|
||||
|
||||
if len(peers) > 0:
|
||||
self._beolink_attributes["beolink"]["peers"] = {}
|
||||
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
|
||||
BangOlufsenAttribute.BEOLINK_PEERS
|
||||
] = {}
|
||||
for peer in peers:
|
||||
self._beolink_attributes["beolink"]["peers"][peer.friendly_name] = (
|
||||
peer.jid
|
||||
)
|
||||
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
|
||||
BangOlufsenAttribute.BEOLINK_PEERS
|
||||
][peer.friendly_name] = peer.jid
|
||||
|
||||
# Add Beolink listeners / leader
|
||||
self._remote_leader = self._playback_metadata.remote_leader
|
||||
@@ -477,7 +488,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
# Add self
|
||||
group_members.append(self.entity_id)
|
||||
|
||||
self._beolink_attributes["beolink"]["leader"] = {
|
||||
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
|
||||
BangOlufsenAttribute.BEOLINK_LEADER
|
||||
] = {
|
||||
self._remote_leader.friendly_name: self._remote_leader.jid,
|
||||
}
|
||||
|
||||
@@ -514,9 +527,9 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
beolink_listener.jid
|
||||
)
|
||||
break
|
||||
self._beolink_attributes["beolink"]["listeners"] = (
|
||||
beolink_listeners_attribute
|
||||
)
|
||||
self._beolink_attributes[BangOlufsenAttribute.BEOLINK][
|
||||
BangOlufsenAttribute.BEOLINK_LISTENERS
|
||||
] = beolink_listeners_attribute
|
||||
|
||||
self._attr_group_members = group_members
|
||||
|
||||
@@ -615,11 +628,18 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
return None
|
||||
|
||||
@property
|
||||
def media_content_type(self) -> str:
|
||||
def media_content_type(self) -> MediaType | str | None:
|
||||
"""Return the current media type."""
|
||||
# Hard to determine content type
|
||||
if self._source_change.id == BangOlufsenSource.URI_STREAMER.id:
|
||||
return MediaType.URL
|
||||
content_type = {
|
||||
BangOlufsenSource.URI_STREAMER.id: MediaType.URL,
|
||||
BangOlufsenSource.DEEZER.id: BangOlufsenMediaType.DEEZER,
|
||||
BangOlufsenSource.TIDAL.id: BangOlufsenMediaType.TIDAL,
|
||||
BangOlufsenSource.NET_RADIO.id: BangOlufsenMediaType.RADIO,
|
||||
}
|
||||
# Hard to determine content type.
|
||||
if self._source_change.id in content_type:
|
||||
return content_type[self._source_change.id]
|
||||
|
||||
return MediaType.MUSIC
|
||||
|
||||
@property
|
||||
@@ -632,6 +652,11 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
|
||||
"""Return the current playback progress."""
|
||||
return self._playback_progress.progress
|
||||
|
||||
@property
|
||||
def media_content_id(self) -> str | None:
|
||||
"""Return internal ID of Deezer, Tidal and radio stations."""
|
||||
return self._playback_metadata.source_internal_id
|
||||
|
||||
@property
|
||||
def media_image_url(self) -> str | None:
|
||||
"""Return URL of the currently playing music."""
|
||||
|
||||
@@ -98,6 +98,12 @@
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"started_cooling": {
|
||||
"trigger": "mdi:snowflake"
|
||||
},
|
||||
"started_drying": {
|
||||
"trigger": "mdi:water-percent"
|
||||
},
|
||||
"started_heating": {
|
||||
"trigger": "mdi:fire"
|
||||
},
|
||||
|
||||
@@ -298,6 +298,28 @@
|
||||
},
|
||||
"title": "Climate",
|
||||
"triggers": {
|
||||
"started_cooling": {
|
||||
"description": "Triggers when a climate started cooling.",
|
||||
"description_configured": "[%key:component::climate::triggers::started_cooling::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a climate started cooling"
|
||||
},
|
||||
"started_drying": {
|
||||
"description": "Triggers when a climate started drying.",
|
||||
"description_configured": "[%key:component::climate::triggers::started_drying::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a climate started drying"
|
||||
},
|
||||
"started_heating": {
|
||||
"description": "Triggers when a climate starts to heat.",
|
||||
"description_configured": "[%key:component::climate::triggers::started_heating::description%]",
|
||||
|
||||
@@ -11,6 +11,12 @@ from homeassistant.helpers.trigger import (
|
||||
from .const import ATTR_HVAC_ACTION, DOMAIN, HVACAction, HVACMode
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"started_cooling": make_entity_state_attribute_trigger(
|
||||
DOMAIN, ATTR_HVAC_ACTION, HVACAction.COOLING
|
||||
),
|
||||
"started_drying": make_entity_state_attribute_trigger(
|
||||
DOMAIN, ATTR_HVAC_ACTION, HVACAction.DRYING
|
||||
),
|
||||
"turned_off": make_entity_state_trigger(DOMAIN, HVACMode.OFF),
|
||||
"turned_on": make_conditional_entity_state_trigger(
|
||||
DOMAIN,
|
||||
|
||||
@@ -14,6 +14,8 @@
|
||||
- last
|
||||
- any
|
||||
|
||||
started_cooling: *trigger_common
|
||||
started_drying: *trigger_common
|
||||
started_heating: *trigger_common
|
||||
turned_off: *trigger_common
|
||||
turned_on: *trigger_common
|
||||
|
||||
@@ -6,6 +6,7 @@ import io
|
||||
from json import JSONDecodeError
|
||||
import logging
|
||||
|
||||
from hass_nabucasa import NabuCasaBaseError
|
||||
from hass_nabucasa.llm import (
|
||||
LLMAuthenticationError,
|
||||
LLMError,
|
||||
@@ -93,10 +94,11 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Home Assistant Cloud AI Task entity."""
|
||||
cloud = hass.data[DATA_CLOUD]
|
||||
if not (cloud := hass.data[DATA_CLOUD]).is_logged_in:
|
||||
return
|
||||
try:
|
||||
await cloud.llm.async_ensure_token()
|
||||
except LLMError:
|
||||
except (LLMError, NabuCasaBaseError):
|
||||
return
|
||||
|
||||
async_add_entities([CloudLLMTaskEntity(cloud, config_entry)])
|
||||
|
||||
@@ -4,6 +4,7 @@ from __future__ import annotations
|
||||
|
||||
from typing import Literal
|
||||
|
||||
from hass_nabucasa import NabuCasaBaseError
|
||||
from hass_nabucasa.llm import LLMError
|
||||
|
||||
from homeassistant.components import conversation
|
||||
@@ -23,10 +24,11 @@ async def async_setup_entry(
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Home Assistant Cloud conversation entity."""
|
||||
cloud = hass.data[DATA_CLOUD]
|
||||
if not (cloud := hass.data[DATA_CLOUD]).is_logged_in:
|
||||
return
|
||||
try:
|
||||
await cloud.llm.async_ensure_token()
|
||||
except LLMError:
|
||||
except (LLMError, NabuCasaBaseError):
|
||||
return
|
||||
|
||||
async_add_entities([CloudConversationEntity(cloud, config_entry)])
|
||||
|
||||
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==1.6.1"],
|
||||
"requirements": ["hass-nabucasa==1.6.2"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -236,7 +236,9 @@ async def async_prepare_agent(
|
||||
|
||||
|
||||
async def async_handle_sentence_triggers(
|
||||
hass: HomeAssistant, user_input: ConversationInput
|
||||
hass: HomeAssistant,
|
||||
user_input: ConversationInput,
|
||||
chat_log: ChatLog,
|
||||
) -> str | None:
|
||||
"""Try to match input against sentence triggers and return response text.
|
||||
|
||||
@@ -245,12 +247,13 @@ async def async_handle_sentence_triggers(
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
assert agent is not None
|
||||
|
||||
return await agent.async_handle_sentence_triggers(user_input)
|
||||
return await agent.async_handle_sentence_triggers(user_input, chat_log)
|
||||
|
||||
|
||||
async def async_handle_intents(
|
||||
hass: HomeAssistant,
|
||||
user_input: ConversationInput,
|
||||
chat_log: ChatLog,
|
||||
*,
|
||||
intent_filter: Callable[[RecognizeResult], bool] | None = None,
|
||||
) -> intent.IntentResponse | None:
|
||||
@@ -261,7 +264,9 @@ async def async_handle_intents(
|
||||
agent = get_agent_manager(hass).default_agent
|
||||
assert agent is not None
|
||||
|
||||
return await agent.async_handle_intents(user_input, intent_filter=intent_filter)
|
||||
return await agent.async_handle_intents(
|
||||
user_input, chat_log, intent_filter=intent_filter
|
||||
)
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
|
||||
@@ -66,6 +66,7 @@ from homeassistant.helpers import (
|
||||
entity_registry as er,
|
||||
floor_registry as fr,
|
||||
intent,
|
||||
llm,
|
||||
start as ha_start,
|
||||
template,
|
||||
translation,
|
||||
@@ -76,7 +77,7 @@ from homeassistant.util import language as language_util
|
||||
from homeassistant.util.json import JsonObjectType, json_loads_object
|
||||
|
||||
from .agent_manager import get_agent_manager
|
||||
from .chat_log import AssistantContent, ChatLog
|
||||
from .chat_log import AssistantContent, ChatLog, ToolResultContent
|
||||
from .const import (
|
||||
DOMAIN,
|
||||
METADATA_CUSTOM_FILE,
|
||||
@@ -435,7 +436,7 @@ class DefaultAgent(ConversationEntity):
|
||||
if trigger_result := await self.async_recognize_sentence_trigger(user_input):
|
||||
# Process callbacks and get response
|
||||
response_text = await self._handle_trigger_result(
|
||||
trigger_result, user_input
|
||||
trigger_result, user_input, chat_log
|
||||
)
|
||||
|
||||
# Convert to conversation result
|
||||
@@ -447,8 +448,9 @@ class DefaultAgent(ConversationEntity):
|
||||
if response is None:
|
||||
# Match intents
|
||||
intent_result = await self.async_recognize_intent(user_input)
|
||||
|
||||
response = await self._async_process_intent_result(
|
||||
intent_result, user_input
|
||||
intent_result, user_input, chat_log
|
||||
)
|
||||
|
||||
speech: str = response.speech.get("plain", {}).get("speech", "")
|
||||
@@ -467,6 +469,7 @@ class DefaultAgent(ConversationEntity):
|
||||
self,
|
||||
result: RecognizeResult | None,
|
||||
user_input: ConversationInput,
|
||||
chat_log: ChatLog,
|
||||
) -> intent.IntentResponse:
|
||||
"""Process user input with intents."""
|
||||
language = user_input.language or self.hass.config.language
|
||||
@@ -529,12 +532,21 @@ class DefaultAgent(ConversationEntity):
|
||||
ConversationTraceEventType.TOOL_CALL,
|
||||
{
|
||||
"intent_name": result.intent.name,
|
||||
"slots": {
|
||||
entity.name: entity.value or entity.text
|
||||
for entity in result.entities_list
|
||||
},
|
||||
"slots": {entity.name: entity.value for entity in result.entities_list},
|
||||
},
|
||||
)
|
||||
tool_input = llm.ToolInput(
|
||||
tool_name=result.intent.name,
|
||||
tool_args={entity.name: entity.value for entity in result.entities_list},
|
||||
external=True,
|
||||
)
|
||||
chat_log.async_add_assistant_content_without_tools(
|
||||
AssistantContent(
|
||||
agent_id=user_input.agent_id,
|
||||
content=None,
|
||||
tool_calls=[tool_input],
|
||||
)
|
||||
)
|
||||
|
||||
try:
|
||||
intent_response = await intent.async_handle(
|
||||
@@ -597,6 +609,16 @@ class DefaultAgent(ConversationEntity):
|
||||
)
|
||||
intent_response.async_set_speech(speech)
|
||||
|
||||
tool_result = llm.IntentResponseDict(intent_response)
|
||||
chat_log.async_add_assistant_content_without_tools(
|
||||
ToolResultContent(
|
||||
agent_id=user_input.agent_id,
|
||||
tool_call_id=tool_input.id,
|
||||
tool_name=tool_input.tool_name,
|
||||
tool_result=tool_result,
|
||||
)
|
||||
)
|
||||
|
||||
return intent_response
|
||||
|
||||
def _recognize(
|
||||
@@ -1523,16 +1545,31 @@ class DefaultAgent(ConversationEntity):
|
||||
)
|
||||
|
||||
async def _handle_trigger_result(
|
||||
self, result: SentenceTriggerResult, user_input: ConversationInput
|
||||
self,
|
||||
result: SentenceTriggerResult,
|
||||
user_input: ConversationInput,
|
||||
chat_log: ChatLog,
|
||||
) -> str:
|
||||
"""Run sentence trigger callbacks and return response text."""
|
||||
|
||||
# Gather callback responses in parallel
|
||||
trigger_callbacks = [
|
||||
self._triggers_details[trigger_id].callback(user_input, trigger_result)
|
||||
for trigger_id, trigger_result in result.matched_triggers.items()
|
||||
]
|
||||
|
||||
tool_input = llm.ToolInput(
|
||||
tool_name="trigger_sentence",
|
||||
tool_args={},
|
||||
external=True,
|
||||
)
|
||||
chat_log.async_add_assistant_content_without_tools(
|
||||
AssistantContent(
|
||||
agent_id=user_input.agent_id,
|
||||
content=None,
|
||||
tool_calls=[tool_input],
|
||||
)
|
||||
)
|
||||
|
||||
# Use first non-empty result as response.
|
||||
#
|
||||
# There may be multiple copies of a trigger running when editing in
|
||||
@@ -1561,23 +1598,38 @@ class DefaultAgent(ConversationEntity):
|
||||
f"component.{DOMAIN}.conversation.agent.done", "Done"
|
||||
)
|
||||
|
||||
tool_result: dict[str, Any] = {"response": response_text}
|
||||
chat_log.async_add_assistant_content_without_tools(
|
||||
ToolResultContent(
|
||||
agent_id=user_input.agent_id,
|
||||
tool_call_id=tool_input.id,
|
||||
tool_name=tool_input.tool_name,
|
||||
tool_result=tool_result,
|
||||
)
|
||||
)
|
||||
|
||||
return response_text
|
||||
|
||||
async def async_handle_sentence_triggers(
|
||||
self, user_input: ConversationInput
|
||||
self,
|
||||
user_input: ConversationInput,
|
||||
chat_log: ChatLog,
|
||||
) -> str | None:
|
||||
"""Try to input sentence against sentence triggers and return response text.
|
||||
|
||||
Returns None if no match occurred.
|
||||
"""
|
||||
if trigger_result := await self.async_recognize_sentence_trigger(user_input):
|
||||
return await self._handle_trigger_result(trigger_result, user_input)
|
||||
return await self._handle_trigger_result(
|
||||
trigger_result, user_input, chat_log
|
||||
)
|
||||
|
||||
return None
|
||||
|
||||
async def async_handle_intents(
|
||||
self,
|
||||
user_input: ConversationInput,
|
||||
chat_log: ChatLog,
|
||||
*,
|
||||
intent_filter: Callable[[RecognizeResult], bool] | None = None,
|
||||
) -> intent.IntentResponse | None:
|
||||
@@ -1593,7 +1645,7 @@ class DefaultAgent(ConversationEntity):
|
||||
# No error message on failed match
|
||||
return None
|
||||
|
||||
response = await self._async_process_intent_result(result, user_input)
|
||||
response = await self._async_process_intent_result(result, user_input, chat_log)
|
||||
if (
|
||||
response.response_type == intent.IntentResponseType.ERROR
|
||||
and response.error_code
|
||||
|
||||
@@ -8,6 +8,10 @@ from typing import Any
|
||||
from pycoolmasternet_async import SWING_MODES
|
||||
|
||||
from homeassistant.components.climate import (
|
||||
FAN_AUTO,
|
||||
FAN_HIGH,
|
||||
FAN_LOW,
|
||||
FAN_MEDIUM,
|
||||
ClimateEntity,
|
||||
ClimateEntityFeature,
|
||||
HVACMode,
|
||||
@@ -31,7 +35,16 @@ CM_TO_HA_STATE = {
|
||||
|
||||
HA_STATE_TO_CM = {value: key for key, value in CM_TO_HA_STATE.items()}
|
||||
|
||||
FAN_MODES = ["low", "med", "high", "auto"]
|
||||
CM_TO_HA_FAN = {
|
||||
"low": FAN_LOW,
|
||||
"med": FAN_MEDIUM,
|
||||
"high": FAN_HIGH,
|
||||
"auto": FAN_AUTO,
|
||||
}
|
||||
|
||||
HA_FAN_TO_CM = {value: key for key, value in CM_TO_HA_FAN.items()}
|
||||
|
||||
FAN_MODES = list(CM_TO_HA_FAN.values())
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -111,7 +124,7 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
@property
|
||||
def fan_mode(self):
|
||||
"""Return the fan setting."""
|
||||
return self._unit.fan_speed
|
||||
return CM_TO_HA_FAN[self._unit.fan_speed]
|
||||
|
||||
@property
|
||||
def fan_modes(self):
|
||||
@@ -138,7 +151,7 @@ class CoolmasterClimate(CoolmasterEntity, ClimateEntity):
|
||||
async def async_set_fan_mode(self, fan_mode: str) -> None:
|
||||
"""Set new fan mode."""
|
||||
_LOGGER.debug("Setting fan mode of %s to %s", self.unique_id, fan_mode)
|
||||
self._unit = await self._unit.set_fan_speed(fan_mode)
|
||||
self._unit = await self._unit.set_fan_speed(HA_FAN_TO_CM[fan_mode])
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_set_swing_mode(self, swing_mode: str) -> None:
|
||||
|
||||
@@ -15,6 +15,11 @@ from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
def normalize_pairing_code(code: str) -> str:
|
||||
"""Normalize pairing code by removing spaces and capitalizing."""
|
||||
return code.replace(" ", "").upper()
|
||||
|
||||
|
||||
class DropletConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle Droplet config flow."""
|
||||
|
||||
@@ -52,14 +57,13 @@ class DropletConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
if user_input is not None:
|
||||
# Test if we can connect before returning
|
||||
session = async_get_clientsession(self.hass)
|
||||
if await self._droplet_discovery.try_connect(
|
||||
session, user_input[CONF_CODE]
|
||||
):
|
||||
code = normalize_pairing_code(user_input[CONF_CODE])
|
||||
if await self._droplet_discovery.try_connect(session, code):
|
||||
device_data = {
|
||||
CONF_IP_ADDRESS: self._droplet_discovery.host,
|
||||
CONF_PORT: self._droplet_discovery.port,
|
||||
CONF_DEVICE_ID: device_id,
|
||||
CONF_CODE: user_input[CONF_CODE],
|
||||
CONF_CODE: code,
|
||||
}
|
||||
|
||||
return self.async_create_entry(
|
||||
@@ -90,14 +94,15 @@ class DropletConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
user_input[CONF_IP_ADDRESS], DropletConnection.DEFAULT_PORT, ""
|
||||
)
|
||||
session = async_get_clientsession(self.hass)
|
||||
if await self._droplet_discovery.try_connect(
|
||||
session, user_input[CONF_CODE]
|
||||
) and (device_id := await self._droplet_discovery.get_device_id()):
|
||||
code = normalize_pairing_code(user_input[CONF_CODE])
|
||||
if await self._droplet_discovery.try_connect(session, code) and (
|
||||
device_id := await self._droplet_discovery.get_device_id()
|
||||
):
|
||||
device_data = {
|
||||
CONF_IP_ADDRESS: self._droplet_discovery.host,
|
||||
CONF_PORT: self._droplet_discovery.port,
|
||||
CONF_DEVICE_ID: device_id,
|
||||
CONF_CODE: user_input[CONF_CODE],
|
||||
CONF_CODE: code,
|
||||
}
|
||||
await self.async_set_unique_id(device_id, raise_on_progress=False)
|
||||
self._abort_if_unique_id_configured(
|
||||
|
||||
@@ -7,5 +7,5 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==16.3.0"]
|
||||
"requirements": ["py-sucks==0.9.11", "deebot-client==16.4.0"]
|
||||
}
|
||||
|
||||
@@ -285,16 +285,14 @@ async def async_setup_entry(
|
||||
name=sensor.name,
|
||||
)
|
||||
|
||||
# Hourly rain doesn't reset to fixed hours, it must be measurement state classes
|
||||
# Only total rain needs state class for long-term statistics
|
||||
if sensor.key in (
|
||||
"hrain_piezomm",
|
||||
"hrain_piezo",
|
||||
"hourlyrainmm",
|
||||
"hourlyrainin",
|
||||
"totalrainin",
|
||||
"totalrainmm",
|
||||
):
|
||||
description = dataclasses.replace(
|
||||
description,
|
||||
state_class=SensorStateClass.MEASUREMENT,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
)
|
||||
|
||||
async_add_entities([EcowittSensorEntity(sensor, description)])
|
||||
|
||||
401
homeassistant/components/energyid/__init__.py
Normal file
401
homeassistant/components/energyid/__init__.py
Normal file
@@ -0,0 +1,401 @@
|
||||
"""The EnergyID integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
import datetime as dt
|
||||
from datetime import timedelta
|
||||
import functools
|
||||
import logging
|
||||
|
||||
from aiohttp import ClientError, ClientResponseError
|
||||
from energyid_webhooks.client_v2 import WebhookClient
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN
|
||||
from homeassistant.core import (
|
||||
CALLBACK_TYPE,
|
||||
Event,
|
||||
EventStateChangedData,
|
||||
HomeAssistant,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.event import (
|
||||
async_track_entity_registry_updated_event,
|
||||
async_track_state_change_event,
|
||||
async_track_time_interval,
|
||||
)
|
||||
|
||||
from .const import (
|
||||
CONF_DEVICE_ID,
|
||||
CONF_DEVICE_NAME,
|
||||
CONF_ENERGYID_KEY,
|
||||
CONF_HA_ENTITY_UUID,
|
||||
CONF_PROVISIONING_KEY,
|
||||
CONF_PROVISIONING_SECRET,
|
||||
DOMAIN,
|
||||
)
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
type EnergyIDConfigEntry = ConfigEntry[EnergyIDRuntimeData]
|
||||
|
||||
DEFAULT_UPLOAD_INTERVAL_SECONDS = 60
|
||||
|
||||
|
||||
@dataclass
|
||||
class EnergyIDRuntimeData:
|
||||
"""Runtime data for the EnergyID integration."""
|
||||
|
||||
client: WebhookClient
|
||||
mappings: dict[str, str]
|
||||
state_listener: CALLBACK_TYPE | None = None
|
||||
registry_tracking_listener: CALLBACK_TYPE | None = None
|
||||
unavailable_logged: bool = False
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: EnergyIDConfigEntry) -> bool:
|
||||
"""Set up EnergyID from a config entry."""
|
||||
session = async_get_clientsession(hass)
|
||||
client = WebhookClient(
|
||||
provisioning_key=entry.data[CONF_PROVISIONING_KEY],
|
||||
provisioning_secret=entry.data[CONF_PROVISIONING_SECRET],
|
||||
device_id=entry.data[CONF_DEVICE_ID],
|
||||
device_name=entry.data[CONF_DEVICE_NAME],
|
||||
session=session,
|
||||
)
|
||||
|
||||
entry.runtime_data = EnergyIDRuntimeData(
|
||||
client=client,
|
||||
mappings={},
|
||||
)
|
||||
|
||||
is_claimed = None
|
||||
try:
|
||||
is_claimed = await client.authenticate()
|
||||
except TimeoutError as err:
|
||||
raise ConfigEntryNotReady(
|
||||
f"Timeout authenticating with EnergyID: {err}"
|
||||
) from err
|
||||
except ClientResponseError as err:
|
||||
# 401/403 = invalid credentials, trigger reauth
|
||||
if err.status in (401, 403):
|
||||
raise ConfigEntryAuthFailed(f"Invalid credentials: {err}") from err
|
||||
# Other HTTP errors are likely temporary
|
||||
raise ConfigEntryNotReady(
|
||||
f"HTTP error authenticating with EnergyID: {err}"
|
||||
) from err
|
||||
except ClientError as err:
|
||||
# Network/connection errors are temporary
|
||||
raise ConfigEntryNotReady(
|
||||
f"Connection error authenticating with EnergyID: {err}"
|
||||
) from err
|
||||
except Exception as err:
|
||||
# Unknown errors - log and retry (safer than forcing reauth)
|
||||
_LOGGER.exception("Unexpected error during EnergyID authentication")
|
||||
raise ConfigEntryNotReady(
|
||||
f"Unexpected error authenticating with EnergyID: {err}"
|
||||
) from err
|
||||
|
||||
if not is_claimed:
|
||||
# Device exists but not claimed = user needs to claim it = auth issue
|
||||
raise ConfigEntryAuthFailed("Device is not claimed. Please re-authenticate.")
|
||||
|
||||
_LOGGER.debug("EnergyID device '%s' authenticated successfully", client.device_name)
|
||||
|
||||
async def _async_synchronize_sensors(now: dt.datetime | None = None) -> None:
|
||||
"""Callback for periodically synchronizing sensor data."""
|
||||
try:
|
||||
await client.synchronize_sensors()
|
||||
if entry.runtime_data.unavailable_logged:
|
||||
_LOGGER.debug("Connection to EnergyID re-established")
|
||||
entry.runtime_data.unavailable_logged = False
|
||||
except (OSError, RuntimeError) as err:
|
||||
if not entry.runtime_data.unavailable_logged:
|
||||
_LOGGER.debug("EnergyID is unavailable: %s", err)
|
||||
entry.runtime_data.unavailable_logged = True
|
||||
|
||||
upload_interval = DEFAULT_UPLOAD_INTERVAL_SECONDS
|
||||
if client.webhook_policy:
|
||||
upload_interval = client.webhook_policy.get(
|
||||
"uploadInterval", DEFAULT_UPLOAD_INTERVAL_SECONDS
|
||||
)
|
||||
|
||||
# Schedule the callback and automatically unsubscribe when the entry is unloaded.
|
||||
entry.async_on_unload(
|
||||
async_track_time_interval(
|
||||
hass, _async_synchronize_sensors, timedelta(seconds=upload_interval)
|
||||
)
|
||||
)
|
||||
entry.async_on_unload(entry.add_update_listener(config_entry_update_listener))
|
||||
|
||||
update_listeners(hass, entry)
|
||||
|
||||
_LOGGER.debug(
|
||||
"Starting EnergyID background sync for '%s'",
|
||||
client.device_name,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def config_entry_update_listener(
|
||||
hass: HomeAssistant, entry: EnergyIDConfigEntry
|
||||
) -> None:
|
||||
"""Handle config entry updates, including subentry changes."""
|
||||
_LOGGER.debug("Config entry updated for %s, reloading listeners", entry.entry_id)
|
||||
update_listeners(hass, entry)
|
||||
|
||||
|
||||
@callback
|
||||
def update_listeners(hass: HomeAssistant, entry: EnergyIDConfigEntry) -> None:
|
||||
"""Set up or update state listeners and queue initial states."""
|
||||
runtime_data = entry.runtime_data
|
||||
client = runtime_data.client
|
||||
|
||||
# Clean up old state listener
|
||||
if runtime_data.state_listener:
|
||||
runtime_data.state_listener()
|
||||
runtime_data.state_listener = None
|
||||
|
||||
mappings: dict[str, str] = {}
|
||||
entities_to_track: list[str] = []
|
||||
old_mappings = set(runtime_data.mappings.keys())
|
||||
new_mappings = set()
|
||||
ent_reg = er.async_get(hass)
|
||||
|
||||
subentries = list(entry.subentries.values())
|
||||
_LOGGER.debug(
|
||||
"Found %d subentries in entry.subentries: %s",
|
||||
len(subentries),
|
||||
[s.data for s in subentries],
|
||||
)
|
||||
|
||||
# Build current entity mappings
|
||||
tracked_entity_ids = []
|
||||
for subentry in subentries:
|
||||
entity_uuid = subentry.data.get(CONF_HA_ENTITY_UUID)
|
||||
energyid_key = subentry.data.get(CONF_ENERGYID_KEY)
|
||||
|
||||
if not (entity_uuid and energyid_key):
|
||||
continue
|
||||
|
||||
entity_entry = ent_reg.async_get(entity_uuid)
|
||||
if not entity_entry:
|
||||
_LOGGER.warning(
|
||||
"Entity with UUID %s does not exist, skipping mapping to %s",
|
||||
entity_uuid,
|
||||
energyid_key,
|
||||
)
|
||||
continue
|
||||
|
||||
ha_entity_id = entity_entry.entity_id
|
||||
tracked_entity_ids.append(ha_entity_id)
|
||||
|
||||
if not hass.states.get(ha_entity_id):
|
||||
# Entity exists in registry but is not present in the state machine
|
||||
_LOGGER.debug(
|
||||
"Entity %s does not exist in state machine yet, will track when available (mapping to %s)",
|
||||
ha_entity_id,
|
||||
energyid_key,
|
||||
)
|
||||
# Still add to entities_to_track so we can handle it when state appears
|
||||
entities_to_track.append(ha_entity_id)
|
||||
continue
|
||||
|
||||
mappings[ha_entity_id] = energyid_key
|
||||
entities_to_track.append(ha_entity_id)
|
||||
new_mappings.add(ha_entity_id)
|
||||
client.get_or_create_sensor(energyid_key)
|
||||
|
||||
if ha_entity_id not in old_mappings:
|
||||
_LOGGER.debug(
|
||||
"New mapping detected for %s, queuing initial state", ha_entity_id
|
||||
)
|
||||
if (
|
||||
current_state := hass.states.get(ha_entity_id)
|
||||
) and current_state.state not in (
|
||||
STATE_UNKNOWN,
|
||||
STATE_UNAVAILABLE,
|
||||
):
|
||||
try:
|
||||
value = float(current_state.state)
|
||||
timestamp = current_state.last_updated or dt.datetime.now(dt.UTC)
|
||||
client.get_or_create_sensor(energyid_key).update(value, timestamp)
|
||||
except (ValueError, TypeError):
|
||||
_LOGGER.debug(
|
||||
"Could not convert initial state of %s to float: %s",
|
||||
ha_entity_id,
|
||||
current_state.state,
|
||||
)
|
||||
|
||||
# Clean up old entity registry listener
|
||||
if runtime_data.registry_tracking_listener:
|
||||
runtime_data.registry_tracking_listener()
|
||||
runtime_data.registry_tracking_listener = None
|
||||
|
||||
# Set up listeners for entity registry changes
|
||||
if tracked_entity_ids:
|
||||
_LOGGER.debug("Setting up entity registry tracking for: %s", tracked_entity_ids)
|
||||
|
||||
def _handle_entity_registry_change(
|
||||
event: Event[er.EventEntityRegistryUpdatedData],
|
||||
) -> None:
|
||||
"""Handle entity registry changes for our tracked entities."""
|
||||
_LOGGER.debug("Registry event for tracked entity: %s", event.data)
|
||||
|
||||
if event.data["action"] == "update":
|
||||
# Type is now narrowed to _EventEntityRegistryUpdatedData_Update
|
||||
if "entity_id" in event.data["changes"]:
|
||||
old_entity_id = event.data["changes"]["entity_id"]
|
||||
new_entity_id = event.data["entity_id"]
|
||||
|
||||
_LOGGER.debug(
|
||||
"Tracked entity ID changed: %s -> %s",
|
||||
old_entity_id,
|
||||
new_entity_id,
|
||||
)
|
||||
# Entity ID changed, need to reload listeners to track new ID
|
||||
update_listeners(hass, entry)
|
||||
|
||||
elif event.data["action"] == "remove":
|
||||
_LOGGER.debug("Tracked entity removed: %s", event.data["entity_id"])
|
||||
# reminder: Create repair issue to notify user about removed entity
|
||||
update_listeners(hass, entry)
|
||||
|
||||
# Track the specific entity IDs we care about
|
||||
unsub_entity_registry = async_track_entity_registry_updated_event(
|
||||
hass, tracked_entity_ids, _handle_entity_registry_change
|
||||
)
|
||||
runtime_data.registry_tracking_listener = unsub_entity_registry
|
||||
|
||||
if removed_mappings := old_mappings - new_mappings:
|
||||
_LOGGER.debug("Removed mappings: %s", ", ".join(removed_mappings))
|
||||
|
||||
runtime_data.mappings = mappings
|
||||
|
||||
if not entities_to_track:
|
||||
_LOGGER.debug(
|
||||
"No valid sensor mappings configured for '%s'", client.device_name
|
||||
)
|
||||
return
|
||||
|
||||
unsub_state_change = async_track_state_change_event(
|
||||
hass,
|
||||
entities_to_track,
|
||||
functools.partial(_async_handle_state_change, hass, entry.entry_id),
|
||||
)
|
||||
runtime_data.state_listener = unsub_state_change
|
||||
|
||||
_LOGGER.debug(
|
||||
"Now tracking state changes for %d entities for '%s': %s",
|
||||
len(entities_to_track),
|
||||
client.device_name,
|
||||
entities_to_track,
|
||||
)
|
||||
|
||||
|
||||
@callback
def _async_handle_state_change(
    hass: HomeAssistant, entry_id: str, event: Event[EventStateChangedData]
) -> None:
    """Forward a tracked entity's new state to the EnergyID client.

    Registered via async_track_state_change_event with the entry id bound
    by functools.partial. Ignores unavailable/unknown and non-numeric
    states, and lazily adds entities to the mapping table when they first
    appear in the state machine after setup.
    """
    entity_id = event.data["entity_id"]
    new_state = event.data["new_state"]

    _LOGGER.debug(
        "State change detected for entity: %s, new value: %s",
        entity_id,
        new_state.state if new_state else "None",
    )

    # Nothing useful to send for removed entities or placeholder states.
    if not new_state or new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
        return

    entry = hass.config_entries.async_get_entry(entry_id)
    if not entry or not hasattr(entry, "runtime_data"):
        # Entry is being unloaded or not yet fully initialized
        return

    runtime_data = entry.runtime_data
    client = runtime_data.client

    # Check if entity is already mapped
    if energyid_key := runtime_data.mappings.get(entity_id):
        # Entity already mapped, just update value
        _LOGGER.debug(
            "Updating EnergyID sensor %s with value %s", energyid_key, new_state.state
        )
    else:
        # Entity not mapped yet - check if it should be (handles late-appearing entities)
        ent_reg = er.async_get(hass)
        for subentry in entry.subentries.values():
            # Subentries store the registry UUID, not the entity_id, so a
            # renamed entity keeps its mapping.
            entity_uuid = subentry.data.get(CONF_HA_ENTITY_UUID)
            energyid_key_candidate = subentry.data.get(CONF_ENERGYID_KEY)

            if not (entity_uuid and energyid_key_candidate):
                continue

            # Resolve the stored UUID to the current entity_id.
            entity_entry = ent_reg.async_get(entity_uuid)
            if entity_entry and entity_entry.entity_id == entity_id:
                # Found it! Add to mappings and send initial value
                energyid_key = energyid_key_candidate
                runtime_data.mappings[entity_id] = energyid_key
                client.get_or_create_sensor(energyid_key)
                _LOGGER.debug(
                    "Entity %s now available in state machine, adding to mappings (key: %s)",
                    entity_id,
                    energyid_key,
                )
                break
        else:
            # Not a tracked entity, ignore
            return

    # Non-numeric states (e.g. enum-style sensors) are silently dropped.
    try:
        value = float(new_state.state)
    except (ValueError, TypeError):
        return

    client.get_or_create_sensor(energyid_key).update(value, new_state.last_updated)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: EnergyIDConfigEntry) -> bool:
    """Unload a config entry.

    Unloads dependent subentries first, then detaches the state and
    registry listeners, closes the EnergyID client, and drops
    runtime_data. Returns False only when the unload itself raised.
    """
    _LOGGER.debug("Unloading EnergyID entry for %s", entry.title)

    try:
        # Unload subentries if present (guarded for test and reload scenarios)
        if hasattr(hass.config_entries, "async_entries") and hasattr(entry, "entry_id"):
            subentries = [
                e.entry_id
                for e in hass.config_entries.async_entries(DOMAIN)
                if getattr(e, "parent_entry", None) == entry.entry_id
            ]
            for subentry_id in subentries:
                await hass.config_entries.async_unload(subentry_id)

        # Only clean up listeners and client if runtime_data is present
        if hasattr(entry, "runtime_data"):
            runtime_data = entry.runtime_data

            # Remove state listener
            if runtime_data.state_listener:
                runtime_data.state_listener()

            # Remove registry tracking listener
            if runtime_data.registry_tracking_listener:
                runtime_data.registry_tracking_listener()

            # Closing the client is best-effort: a failure here must not
            # prevent the entry from unloading.
            try:
                await runtime_data.client.close()
            except Exception:
                _LOGGER.exception("Error closing EnergyID client for %s", entry.title)
            del entry.runtime_data
    except Exception:
        # Deliberately broad: any unexpected failure is logged and reported
        # as an unsuccessful unload rather than propagated.
        _LOGGER.exception("Error during async_unload_entry for %s", entry.title)
        return False
    return True
|
||||
293
homeassistant/components/energyid/config_flow.py
Normal file
293
homeassistant/components/energyid/config_flow.py
Normal file
@@ -0,0 +1,293 @@
|
||||
"""Config flow for EnergyID integration."""
|
||||
|
||||
import asyncio
|
||||
from collections.abc import Mapping
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientError, ClientResponseError
|
||||
from energyid_webhooks.client_v2 import WebhookClient
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentryFlow,
|
||||
)
|
||||
from homeassistant.core import callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
import homeassistant.helpers.config_validation as cv
|
||||
from homeassistant.helpers.instance_id import async_get as async_get_instance_id
|
||||
|
||||
from .const import (
|
||||
CONF_DEVICE_ID,
|
||||
CONF_DEVICE_NAME,
|
||||
CONF_PROVISIONING_KEY,
|
||||
CONF_PROVISIONING_SECRET,
|
||||
DOMAIN,
|
||||
ENERGYID_DEVICE_ID_FOR_WEBHOOK_PREFIX,
|
||||
MAX_POLLING_ATTEMPTS,
|
||||
NAME,
|
||||
POLLING_INTERVAL,
|
||||
)
|
||||
from .energyid_sensor_mapping_flow import EnergyIDSensorMappingFlowHandler
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EnergyIDConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle the configuration flow for the EnergyID integration.

    Authenticates with provisioning credentials. If the webhook device is
    not yet claimed in the EnergyID portal, an external step shows the
    claim URL while a background task polls until the claim completes or
    times out.
    """

    def __init__(self) -> None:
        """Initialize the config flow."""
        # Accumulates credentials, device info and claim details across steps.
        self._flow_data: dict[str, Any] = {}
        # Background task polling EnergyID while the user claims the device.
        self._polling_task: asyncio.Task | None = None

    async def _perform_auth_and_get_details(self) -> str | None:
        """Authenticate with EnergyID and retrieve device details.

        Returns None on success (device claimed, record details stored in
        _flow_data), "needs_claim" when the device still has to be claimed
        (claim info stored in _flow_data), or a form error key:
        "invalid_auth", "cannot_connect" or "unknown_auth_error".
        """
        _LOGGER.debug("Starting authentication with EnergyID")
        client = WebhookClient(
            provisioning_key=self._flow_data[CONF_PROVISIONING_KEY],
            provisioning_secret=self._flow_data[CONF_PROVISIONING_SECRET],
            device_id=self._flow_data[CONF_DEVICE_ID],
            device_name=self._flow_data[CONF_DEVICE_NAME],
            session=async_get_clientsession(self.hass),
        )
        try:
            is_claimed = await client.authenticate()
        except ClientResponseError as err:
            # 401 means bad credentials; any other HTTP error is treated as
            # a connectivity problem.
            if err.status == 401:
                _LOGGER.debug("Invalid provisioning key or secret")
                return "invalid_auth"
            _LOGGER.debug(
                "Client response error during EnergyID authentication: %s", err
            )
            return "cannot_connect"
        except ClientError as err:
            _LOGGER.debug(
                "Failed to connect to EnergyID during authentication: %s", err
            )
            return "cannot_connect"
        except Exception:
            _LOGGER.exception("Unexpected error during EnergyID authentication")
            return "unknown_auth_error"
        else:
            _LOGGER.debug("Authentication successful, claimed: %s", is_claimed)

        if is_claimed:
            self._flow_data["record_number"] = client.recordNumber
            self._flow_data["record_name"] = client.recordName
            _LOGGER.debug(
                "Device claimed with record number: %s, record name: %s",
                client.recordNumber,
                client.recordName,
            )
            return None

        # Not claimed yet: keep the claim URL/code for the external step.
        self._flow_data["claim_info"] = client.get_claim_info()
        self._flow_data["claim_info"]["integration_name"] = NAME
        _LOGGER.debug(
            "Device needs claim, claim info: %s", self._flow_data["claim_info"]
        )
        return "needs_claim"

    async def _async_poll_for_claim(self) -> None:
        """Poll EnergyID to check if device has been claimed.

        Runs as a background task started by async_step_auth_and_claim;
        re-enters the flow once the device is claimed.
        """
        for _attempt in range(1, MAX_POLLING_ATTEMPTS + 1):
            await asyncio.sleep(POLLING_INTERVAL)

            auth_status = await self._perform_auth_and_get_details()

            if auth_status is None:
                # Device claimed - advance flow to async_step_create_entry
                _LOGGER.debug("Device claimed, advancing to create entry")
                self.hass.async_create_task(
                    self.hass.config_entries.flow.async_configure(self.flow_id)
                )
                return

            if auth_status != "needs_claim":
                # Stop polling on non-transient errors
                # No user notification needed here as the error will be handled
                # in the next flow step when the user continues the flow
                _LOGGER.debug("Polling stopped due to error: %s", auth_status)
                return

        _LOGGER.debug("Polling timeout after %s attempts", MAX_POLLING_ATTEMPTS)
        # No user notification here because:
        # 1. User may still be completing the claim process in EnergyID portal
        # 2. Immediate notification could interrupt their workflow or cause confusion
        # 3. When user clicks "Submit" to continue, the flow validates claim status
        #    and will show appropriate error/success messages based on current state
        # 4. Timeout allows graceful fallback: user can retry claim or see proper error

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step of the configuration flow."""
        _LOGGER.debug("Starting user step with input: %s", user_input)
        errors: dict[str, str] = {}
        if user_input is not None:
            instance_id = await async_get_instance_id(self.hass)
            # Note: This device_id is for EnergyID's webhook system, not related to HA's device registry
            # Millisecond timestamp suffix keeps the id unique per setup attempt.
            device_suffix = f"{int(asyncio.get_event_loop().time() * 1000)}"
            device_id = (
                f"{ENERGYID_DEVICE_ID_FOR_WEBHOOK_PREFIX}{instance_id}_{device_suffix}"
            )
            self._flow_data = {
                **user_input,
                CONF_DEVICE_ID: device_id,
                CONF_DEVICE_NAME: self.hass.config.location_name,
            }
            _LOGGER.debug("Flow data after user input: %s", self._flow_data)

            auth_status = await self._perform_auth_and_get_details()

            if auth_status is None:
                # Already claimed: create the entry immediately.
                await self.async_set_unique_id(device_id)
                self._abort_if_unique_id_configured()
                _LOGGER.debug(
                    "Creating entry with title: %s", self._flow_data["record_name"]
                )
                return self.async_create_entry(
                    title=self._flow_data["record_name"],
                    data=self._flow_data,
                    description="add_sensor_mapping_hint",
                    description_placeholders={"integration_name": NAME},
                )

            if auth_status == "needs_claim":
                _LOGGER.debug("Redirecting to auth and claim step")
                return await self.async_step_auth_and_claim()

            errors["base"] = auth_status
            _LOGGER.debug("Errors encountered during user step: %s", errors)

        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_PROVISIONING_KEY): str,
                    vol.Required(CONF_PROVISIONING_SECRET): cv.string,
                }
            ),
            errors=errors,
            description_placeholders={
                "docs_url": "https://app.energyid.eu/integrations/home-assistant",
                "integration_name": NAME,
            },
        )

    async def async_step_auth_and_claim(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the step for device claiming using external step with polling."""
        _LOGGER.debug("Starting auth and claim step with input: %s", user_input)

        claim_info = self._flow_data.get("claim_info", {})

        # Start polling when we first enter this step
        if self._polling_task is None:
            self._polling_task = self.hass.async_create_task(
                self._async_poll_for_claim()
            )

            # Show external step to open the EnergyID website
            return self.async_external_step(
                step_id="auth_and_claim",
                url=claim_info.get("claim_url", ""),
                description_placeholders=claim_info,
            )

        # Re-entered (by the polling task or the user).
        # Check if device has been claimed
        auth_status = await self._perform_auth_and_get_details()

        if auth_status is None:
            # Device has been claimed
            if self._polling_task and not self._polling_task.done():
                self._polling_task.cancel()
            self._polling_task = None
            return self.async_external_step_done(next_step_id="create_entry")

        # Device not claimed yet, show the external step again
        if self._polling_task and not self._polling_task.done():
            self._polling_task.cancel()
        self._polling_task = None
        return self.async_external_step(
            step_id="auth_and_claim",
            url=claim_info.get("claim_url", ""),
            description_placeholders=claim_info,
        )

    async def async_step_create_entry(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Final step to create the entry after successful claim."""
        _LOGGER.debug("Creating entry with title: %s", self._flow_data["record_name"])
        return self.async_create_entry(
            title=self._flow_data["record_name"],
            data=self._flow_data,
            description="add_sensor_mapping_hint",
            description_placeholders={"integration_name": NAME},
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Perform reauthentication upon an API authentication error."""
        # Note: This device_id is for EnergyID's webhook system, not related to HA's device registry
        # Only device identity is carried over; the user re-enters the
        # provisioning credentials in the confirm step.
        self._flow_data = {
            CONF_DEVICE_ID: entry_data[CONF_DEVICE_ID],
            CONF_DEVICE_NAME: entry_data[CONF_DEVICE_NAME],
        }
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm reauthentication dialog."""
        errors: dict[str, str] = {}
        if user_input is not None:
            self._flow_data.update(user_input)
            auth_status = await self._perform_auth_and_get_details()

            if auth_status is None:
                # Authentication successful and claimed
                await self.async_set_unique_id(self._flow_data["record_number"])
                # Guard against reauthenticating with a different EnergyID record.
                self._abort_if_unique_id_mismatch(reason="wrong_account")
                return self.async_update_reload_and_abort(
                    self._get_reauth_entry(),
                    data_updates={
                        CONF_PROVISIONING_KEY: user_input[CONF_PROVISIONING_KEY],
                        CONF_PROVISIONING_SECRET: user_input[CONF_PROVISIONING_SECRET],
                    },
                )

            if auth_status == "needs_claim":
                return await self.async_step_auth_and_claim()

            errors["base"] = auth_status

        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_PROVISIONING_KEY): str,
                    vol.Required(CONF_PROVISIONING_SECRET): cv.string,
                }
            ),
            errors=errors,
            description_placeholders={
                "docs_url": "https://app.energyid.eu/integrations/home-assistant",
                "integration_name": NAME,
            },
        )

    @classmethod
    @callback
    def async_get_supported_subentry_types(
        cls, config_entry: ConfigEntry
    ) -> dict[str, type[ConfigSubentryFlow]]:
        """Return subentries supported by this integration."""
        return {"sensor_mapping": EnergyIDSensorMappingFlowHandler}
|
||||
21
homeassistant/components/energyid/const.py
Normal file
21
homeassistant/components/energyid/const.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Constants for the EnergyID integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN: Final = "energyid"
|
||||
NAME: Final = "EnergyID"
|
||||
|
||||
# --- Config Flow and Entry Data ---
|
||||
CONF_PROVISIONING_KEY: Final = "provisioning_key"
|
||||
CONF_PROVISIONING_SECRET: Final = "provisioning_secret"
|
||||
CONF_DEVICE_ID: Final = "device_id"
|
||||
CONF_DEVICE_NAME: Final = "device_name"
|
||||
|
||||
# --- Subentry (Mapping) Data ---
|
||||
CONF_HA_ENTITY_UUID: Final = "ha_entity_uuid"
|
||||
CONF_ENERGYID_KEY: Final = "energyid_key"
|
||||
|
||||
# --- Webhook and Polling Configuration ---
|
||||
ENERGYID_DEVICE_ID_FOR_WEBHOOK_PREFIX: Final = "homeassistant_eid_"
|
||||
POLLING_INTERVAL: Final = 2 # seconds
|
||||
MAX_POLLING_ATTEMPTS: Final = 60 # 2 minutes total
|
||||
@@ -0,0 +1,156 @@
|
||||
"""Subentry flow for EnergyID integration, handling sensor mapping management."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.sensor import SensorDeviceClass, SensorStateClass
|
||||
from homeassistant.config_entries import ConfigSubentryFlow, SubentryFlowResult
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers import entity_registry as er
|
||||
from homeassistant.helpers.selector import EntitySelector, EntitySelectorConfig
|
||||
|
||||
from .const import CONF_ENERGYID_KEY, CONF_HA_ENTITY_UUID, DOMAIN, NAME
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@callback
def _get_suggested_entities(hass: HomeAssistant) -> list[str]:
    """Return a sorted list of suggested sensor entity IDs for mapping.

    A sensor qualifies when it is not provided by this integration, has a
    state in the state machine, and looks numeric: either a measurement /
    total state class, or a known numeric device class (checked against
    both the current and the original device class).
    """
    # Single source of truth for both device-class checks; previously this
    # tuple was duplicated verbatim, inviting drift between the two checks.
    numeric_device_classes = (
        SensorDeviceClass.ENERGY,
        SensorDeviceClass.GAS,
        SensorDeviceClass.POWER,
        SensorDeviceClass.TEMPERATURE,
        SensorDeviceClass.VOLUME,
    )
    numeric_state_classes = (
        SensorStateClass.MEASUREMENT,
        SensorStateClass.TOTAL,
        SensorStateClass.TOTAL_INCREASING,
    )

    ent_reg = er.async_get(hass)
    suitable_entities = []

    for entity_entry in ent_reg.entities.values():
        # Only sensor-domain entities not created by this integration.
        if not (
            entity_entry.domain == Platform.SENSOR and entity_entry.platform != DOMAIN
        ):
            continue

        # Skip registry entries with no live state in the state machine.
        if not hass.states.get(entity_entry.entity_id):
            continue

        state_class = (entity_entry.capabilities or {}).get("state_class")
        has_numeric_indicators = (
            state_class in numeric_state_classes
            or entity_entry.device_class in numeric_device_classes
            or entity_entry.original_device_class in numeric_device_classes
        )

        if has_numeric_indicators:
            suitable_entities.append(entity_entry.entity_id)

    return sorted(suitable_entities)
|
||||
|
||||
|
||||
@callback
def _validate_mapping_input(
    ha_entity_id: str | None,
    current_mappings: set[str],
    ent_reg: er.EntityRegistry,
) -> dict[str, str]:
    """Validate a proposed sensor mapping.

    Returns a form-errors dict: empty when the input is valid, otherwise
    a single "base" error key ("entity_required", "entity_not_found" or
    "entity_already_mapped").
    """
    # An entity must be selected at all.
    if not ha_entity_id:
        return {"base": "entity_required"}

    # The selected entity must exist in the registry.
    registry_entry = ent_reg.async_get(ha_entity_id)
    if registry_entry is None:
        return {"base": "entity_not_found"}

    # Mappings are keyed by registry UUID; reject duplicates.
    if registry_entry.id in current_mappings:
        return {"base": "entity_already_mapped"}

    return {}
|
||||
|
||||
|
||||
class EnergyIDSensorMappingFlowHandler(ConfigSubentryFlow):
    """Handle EnergyID sensor mapping subentry flow for adding new mappings."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """Handle the user step for adding a new sensor mapping.

        Validates the selected sensor against the parent entry's existing
        mappings and, on success, creates a subentry storing the entity's
        registry UUID and the derived EnergyID metric key.
        """
        errors: dict[str, str] = {}

        config_entry = self._get_entry()
        ent_reg = er.async_get(self.hass)

        if user_input is not None:
            ha_entity_id = user_input.get("ha_entity_id")

            # Get current mappings by UUID
            current_mappings = {
                uuid
                for sub in config_entry.subentries.values()
                if (uuid := sub.data.get(CONF_HA_ENTITY_UUID)) is not None
            }

            errors = _validate_mapping_input(ha_entity_id, current_mappings, ent_reg)

            if not errors and ha_entity_id:
                # Get entity registry entry
                # (re-fetched; the entity could have been removed meanwhile)
                entity_entry = ent_reg.async_get(ha_entity_id)
                if entity_entry:
                    # The object part of the entity_id doubles as the
                    # EnergyID metric key.
                    energyid_key = ha_entity_id.split(".", 1)[-1]

                    subentry_data = {
                        CONF_HA_ENTITY_UUID: entity_entry.id,  # Store UUID only
                        CONF_ENERGYID_KEY: energyid_key,
                    }

                    title = f"{ha_entity_id.split('.', 1)[-1]} connection to {NAME}"
                    _LOGGER.debug(
                        "Creating subentry with title='%s', data=%s",
                        title,
                        subentry_data,
                    )
                    _LOGGER.debug("Parent config entry ID: %s", config_entry.entry_id)
                    _LOGGER.debug(
                        "Creating subentry with parent: %s", self._get_entry().entry_id
                    )
                    return self.async_create_entry(title=title, data=subentry_data)
                errors["base"] = "entity_not_found"

        suggested_entities = _get_suggested_entities(self.hass)

        data_schema = vol.Schema(
            {
                vol.Required("ha_entity_id"): EntitySelector(
                    EntitySelectorConfig(include_entities=suggested_entities)
                ),
            }
        )

        return self.async_show_form(
            step_id="user",
            data_schema=data_schema,
            errors=errors,
            description_placeholders={"integration_name": NAME},
        )
|
||||
12
homeassistant/components/energyid/manifest.json
Normal file
12
homeassistant/components/energyid/manifest.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"domain": "energyid",
|
||||
"name": "EnergyID",
|
||||
"codeowners": ["@JrtPec", "@Molier"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/energyid",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["energyid_webhooks"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["energyid-webhooks==0.0.14"]
|
||||
}
|
||||
137
homeassistant/components/energyid/quality_scale.yaml
Normal file
137
homeassistant/components/energyid/quality_scale.yaml
Normal file
@@ -0,0 +1,137 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: The integration does not expose any custom service actions.
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: The integration uses a push-based mechanism with a background sync task, not polling.
|
||||
brands:
|
||||
status: done
|
||||
common-modules:
|
||||
status: done
|
||||
config-flow-test-coverage:
|
||||
status: done
|
||||
config-flow:
|
||||
status: done
|
||||
dependency-transparency:
|
||||
status: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: The integration does not expose any custom service actions.
|
||||
docs-high-level-description:
|
||||
status: done
|
||||
docs-installation-instructions:
|
||||
status: done
|
||||
docs-removal-instructions:
|
||||
status: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
entity-unique-id:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
has-entity-name:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
runtime-data:
|
||||
status: done
|
||||
test-before-configure:
|
||||
status: done
|
||||
test-before-setup:
|
||||
status: done
|
||||
unique-config-entry:
|
||||
status: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: The integration does not expose any custom service actions.
|
||||
config-entry-unloading:
|
||||
status: done
|
||||
docs-configuration-parameters:
|
||||
status: done
|
||||
docs-installation-parameters:
|
||||
status: done
|
||||
entity-unavailable:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
integration-owner:
|
||||
status: done
|
||||
log-when-unavailable:
|
||||
status: done
|
||||
comment: The integration logs a single message when the EnergyID service is unavailable.
|
||||
parallel-updates:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
reauthentication-flow:
|
||||
status: done
|
||||
test-coverage:
|
||||
status: done
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
status: exempt
|
||||
comment: The integration does not create any entities, nor does it create devices.
|
||||
diagnostics:
|
||||
status: todo
|
||||
comment: Diagnostics will be added in a follow-up PR to help with debugging.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: Configuration requires manual entry of provisioning credentials.
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: No discovery mechanism is used.
|
||||
docs-data-update:
|
||||
status: done
|
||||
docs-examples:
|
||||
status: done
|
||||
docs-known-limitations:
|
||||
status: done
|
||||
docs-supported-devices:
|
||||
status: exempt
|
||||
comment: This is a service integration not tied to specific device models.
|
||||
docs-supported-functions:
|
||||
status: done
|
||||
docs-troubleshooting:
|
||||
status: done
|
||||
docs-use-cases:
|
||||
status: done
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: The integration creates a single device entry for the service connection.
|
||||
entity-category:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
entity-device-class:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
entity-translations:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
exception-translations:
|
||||
status: done
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: This integration does not create its own entities.
|
||||
reconfiguration-flow:
|
||||
status: todo
|
||||
comment: Reconfiguration will be added in a follow-up PR to allow updating the device name.
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: Authentication issues are handled via the reauthentication flow.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Creates a single service device entry tied to the config entry.
|
||||
|
||||
# Platinum
|
||||
async-dependency:
|
||||
status: done
|
||||
inject-websession:
|
||||
status: done
|
||||
strict-typing:
|
||||
status: todo
|
||||
comment: Full strict typing compliance will be addressed in a future update.
|
||||
71
homeassistant/components/energyid/strings.json
Normal file
71
homeassistant/components/energyid/strings.json
Normal file
@@ -0,0 +1,71 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "This device is already configured.",
|
||||
"reauth_successful": "Reauthentication successful."
|
||||
},
|
||||
"create_entry": {
|
||||
"add_sensor_mapping_hint": "You can now add mappings from any sensor in Home Assistant to {integration_name} using the '+ add sensor mapping' button."
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "Failed to connect to {integration_name} API.",
|
||||
"claim_failed_or_timed_out": "Claiming the device failed or the code expired.",
|
||||
"invalid_auth": "Invalid provisioning key or secret.",
|
||||
"unknown_auth_error": "Unexpected error occurred during authentication."
|
||||
},
|
||||
"step": {
|
||||
"auth_and_claim": {
|
||||
"description": "This Home Assistant connection needs to be claimed in your {integration_name} portal before it can send data.\n\n1. Go to: {claim_url}\n2. Enter code: **{claim_code}**\n3. (Code expires: {valid_until})\n\nAfter successfully claiming the device in {integration_name}, select **Submit** below to continue.",
|
||||
"title": "Claim device in {integration_name}"
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"data": {
|
||||
"provisioning_key": "[%key:component::energyid::config::step::user::data::provisioning_key%]",
|
||||
"provisioning_secret": "[%key:component::energyid::config::step::user::data::provisioning_secret%]"
|
||||
},
|
||||
"data_description": {
|
||||
"provisioning_key": "[%key:component::energyid::config::step::user::data_description::provisioning_key%]",
|
||||
"provisioning_secret": "[%key:component::energyid::config::step::user::data_description::provisioning_secret%]"
|
||||
},
|
||||
"description": "Please re-enter your {integration_name} provisioning key and secret to restore the connection.\n\nMore info: {docs_url}",
|
||||
"title": "Reauthenticate {integration_name}"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"provisioning_key": "Provisioning key",
|
||||
"provisioning_secret": "Provisioning secret"
|
||||
},
|
||||
"data_description": {
|
||||
"provisioning_key": "Your unique key for provisioning.",
|
||||
"provisioning_secret": "Your secret associated with the provisioning key."
|
||||
},
|
||||
"description": "Enter your {integration_name} webhook provisioning key and secret. Find these in your {integration_name} integration setup under provisioning credentials.\n\nMore info: {docs_url}",
|
||||
"title": "Connect to {integration_name}"
|
||||
}
|
||||
}
|
||||
},
|
||||
"config_subentries": {
|
||||
"sensor_mapping": {
|
||||
"entry_type": "service",
|
||||
"error": {
|
||||
"entity_already_mapped": "This Home Assistant entity is already mapped.",
|
||||
"entity_required": "You must select a sensor entity."
|
||||
},
|
||||
"initiate_flow": {
|
||||
"user": "Add sensor mapping"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"ha_entity_id": "Home Assistant sensor"
|
||||
},
|
||||
"data_description": {
|
||||
"ha_entity_id": "Select the sensor from Home Assistant to send to {integration_name}."
|
||||
},
|
||||
"description": "Select a Home Assistant sensor to send to {integration_name}. The sensor name will be used as the {integration_name} metric key.",
|
||||
"title": "Add sensor mapping"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -7,7 +7,7 @@
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["pyenphase"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["pyenphase==2.4.0"],
|
||||
"requirements": ["pyenphase==2.4.2"],
|
||||
"zeroconf": [
|
||||
{
|
||||
"type": "_enphase-envoy._tcp.local."
|
||||
|
||||
@@ -25,6 +25,7 @@ from .domain_data import DomainData
|
||||
from .encryption_key_storage import async_get_encryption_key_storage
|
||||
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData
|
||||
from .manager import DEVICE_CONFLICT_ISSUE_FORMAT, ESPHomeManager, cleanup_instance
|
||||
from .websocket_api import async_setup as async_setup_websocket_api
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -38,6 +39,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
ffmpeg_proxy.async_setup(hass)
|
||||
await assist_satellite.async_setup(hass)
|
||||
await dashboard.async_setup(hass)
|
||||
async_setup_websocket_api(hass)
|
||||
return True
|
||||
|
||||
|
||||
|
||||
@@ -17,7 +17,7 @@ DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False
|
||||
|
||||
DEFAULT_PORT: Final = 6053
|
||||
|
||||
STABLE_BLE_VERSION_STR = "2025.8.0"
|
||||
STABLE_BLE_VERSION_STR = "2025.11.0"
|
||||
STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
|
||||
PROJECT_URLS = {
|
||||
"esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",
|
||||
|
||||
@@ -157,7 +157,7 @@
|
||||
"title": "[%key:component::assist_pipeline::issues::assist_in_progress_deprecated::title%]"
|
||||
},
|
||||
"ble_firmware_outdated": {
|
||||
"description": "To improve Bluetooth reliability and performance, we highly recommend updating {name} with ESPHome {version} or later. When updating the device from ESPHome earlier than 2022.12.0, it is recommended to use a serial cable instead of an over-the-air update to take advantage of the new partition scheme.",
|
||||
"description": "ESPHome {version} introduces ultra-low latency event processing, reducing BLE event delays from 0-16 milliseconds to approximately 12 microseconds. This resolves stability issues when pairing, connecting, or handshaking with devices that require low latency, and makes Bluetooth proxy operations rival or exceed local adapters. We highly recommend updating {name} to take advantage of these improvements.",
|
||||
"title": "Update {name} with ESPHome {version} or later"
|
||||
},
|
||||
"device_conflict": {
|
||||
|
||||
52
homeassistant/components/esphome/websocket_api.py
Normal file
52
homeassistant/components/esphome/websocket_api.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""ESPHome websocket API."""
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
|
||||
from .const import CONF_NOISE_PSK
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
TYPE = "type"
|
||||
ENTRY_ID = "entry_id"
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup(hass: HomeAssistant) -> None:
|
||||
"""Set up the websocket API."""
|
||||
websocket_api.async_register_command(hass, get_encryption_key)
|
||||
|
||||
|
||||
@callback
|
||||
@websocket_api.require_admin
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required(TYPE): "esphome/get_encryption_key",
|
||||
vol.Required(ENTRY_ID): str,
|
||||
}
|
||||
)
|
||||
def get_encryption_key(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.connection.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Get the encryption key for an ESPHome config entry."""
|
||||
entry = hass.config_entries.async_get_entry(msg[ENTRY_ID])
|
||||
if entry is None:
|
||||
connection.send_error(
|
||||
msg["id"], websocket_api.ERR_NOT_FOUND, "Config entry not found"
|
||||
)
|
||||
return
|
||||
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
{
|
||||
"encryption_key": entry.data.get(CONF_NOISE_PSK),
|
||||
},
|
||||
)
|
||||
@@ -102,6 +102,7 @@ SENSORS: tuple[EssentSensorEntityDescription, ...] = (
|
||||
key="average_today",
|
||||
translation_key="average_today",
|
||||
value_fn=lambda energy_data: energy_data.avg_price,
|
||||
energy_types=(EnergyType.ELECTRICITY,),
|
||||
),
|
||||
EssentSensorEntityDescription(
|
||||
key="lowest_price_today",
|
||||
|
||||
@@ -44,9 +44,6 @@
|
||||
"electricity_next_price": {
|
||||
"name": "Next electricity price"
|
||||
},
|
||||
"gas_average_today": {
|
||||
"name": "Average gas price today"
|
||||
},
|
||||
"gas_current_price": {
|
||||
"name": "Current gas price"
|
||||
},
|
||||
|
||||
@@ -1,22 +1,30 @@
|
||||
"""API for fitbit bound to Home Assistant OAuth."""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from collections.abc import Callable
|
||||
from collections.abc import Awaitable, Callable
|
||||
import logging
|
||||
from typing import Any, cast
|
||||
|
||||
from fitbit import Fitbit
|
||||
from fitbit.exceptions import HTTPException, HTTPUnauthorized
|
||||
from fitbit_web_api import ApiClient, Configuration, DevicesApi
|
||||
from fitbit_web_api.exceptions import (
|
||||
ApiException,
|
||||
OpenApiException,
|
||||
UnauthorizedException,
|
||||
)
|
||||
from fitbit_web_api.models.device import Device
|
||||
from requests.exceptions import ConnectionError as RequestsConnectionError
|
||||
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import config_entry_oauth2_flow
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.util.unit_system import METRIC_SYSTEM
|
||||
|
||||
from .const import FitbitUnitSystem
|
||||
from .exceptions import FitbitApiException, FitbitAuthException
|
||||
from .model import FitbitDevice, FitbitProfile
|
||||
from .model import FitbitProfile
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -58,6 +66,14 @@ class FitbitApi(ABC):
|
||||
expires_at=float(token[CONF_EXPIRES_AT]),
|
||||
)
|
||||
|
||||
async def _async_get_fitbit_web_api(self) -> ApiClient:
|
||||
"""Create and return an ApiClient configured with the current access token."""
|
||||
token = await self.async_get_access_token()
|
||||
configuration = Configuration()
|
||||
configuration.pool_manager = async_get_clientsession(self._hass)
|
||||
configuration.access_token = token[CONF_ACCESS_TOKEN]
|
||||
return ApiClient(configuration)
|
||||
|
||||
async def async_get_user_profile(self) -> FitbitProfile:
|
||||
"""Return the user profile from the API."""
|
||||
if self._profile is None:
|
||||
@@ -94,21 +110,13 @@ class FitbitApi(ABC):
|
||||
return FitbitUnitSystem.METRIC
|
||||
return FitbitUnitSystem.EN_US
|
||||
|
||||
async def async_get_devices(self) -> list[FitbitDevice]:
|
||||
"""Return available devices."""
|
||||
client = await self._async_get_client()
|
||||
devices: list[dict[str, str]] = await self._run(client.get_devices)
|
||||
async def async_get_devices(self) -> list[Device]:
|
||||
"""Return available devices using fitbit-web-api."""
|
||||
client = await self._async_get_fitbit_web_api()
|
||||
devices_api = DevicesApi(client)
|
||||
devices: list[Device] = await self._run_async(devices_api.get_devices)
|
||||
_LOGGER.debug("get_devices=%s", devices)
|
||||
return [
|
||||
FitbitDevice(
|
||||
id=device["id"],
|
||||
device_version=device["deviceVersion"],
|
||||
battery_level=int(device["batteryLevel"]),
|
||||
battery=device["battery"],
|
||||
type=device["type"],
|
||||
)
|
||||
for device in devices
|
||||
]
|
||||
return devices
|
||||
|
||||
async def async_get_latest_time_series(self, resource_type: str) -> dict[str, Any]:
|
||||
"""Return the most recent value from the time series for the specified resource type."""
|
||||
@@ -140,6 +148,20 @@ class FitbitApi(ABC):
|
||||
_LOGGER.debug("Error from fitbit API: %s", err)
|
||||
raise FitbitApiException("Error from fitbit API") from err
|
||||
|
||||
async def _run_async[_T](self, func: Callable[[], Awaitable[_T]]) -> _T:
|
||||
"""Run client command."""
|
||||
try:
|
||||
return await func()
|
||||
except UnauthorizedException as err:
|
||||
_LOGGER.debug("Unauthorized error from fitbit API: %s", err)
|
||||
raise FitbitAuthException("Authentication error from fitbit API") from err
|
||||
except ApiException as err:
|
||||
_LOGGER.debug("Error from fitbit API: %s", err)
|
||||
raise FitbitApiException("Error from fitbit API") from err
|
||||
except OpenApiException as err:
|
||||
_LOGGER.debug("Error communicating with fitbit API: %s", err)
|
||||
raise FitbitApiException("Communication error from fitbit API") from err
|
||||
|
||||
|
||||
class OAuthFitbitApi(FitbitApi):
|
||||
"""Provide fitbit authentication tied to an OAuth2 based config entry."""
|
||||
|
||||
@@ -6,6 +6,8 @@ import datetime
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
from fitbit_web_api.models.device import Device
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
@@ -13,7 +15,6 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
|
||||
|
||||
from .api import FitbitApi
|
||||
from .exceptions import FitbitApiException, FitbitAuthException
|
||||
from .model import FitbitDevice
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
@@ -23,7 +24,7 @@ TIMEOUT = 10
|
||||
type FitbitConfigEntry = ConfigEntry[FitbitData]
|
||||
|
||||
|
||||
class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
|
||||
class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, Device]]):
|
||||
"""Coordinator for fetching fitbit devices from the API."""
|
||||
|
||||
config_entry: FitbitConfigEntry
|
||||
@@ -41,7 +42,7 @@ class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
|
||||
)
|
||||
self._api = api
|
||||
|
||||
async def _async_update_data(self) -> dict[str, FitbitDevice]:
|
||||
async def _async_update_data(self) -> dict[str, Device]:
|
||||
"""Fetch data from API endpoint."""
|
||||
async with asyncio.timeout(TIMEOUT):
|
||||
try:
|
||||
@@ -50,7 +51,7 @@ class FitbitDeviceCoordinator(DataUpdateCoordinator[dict[str, FitbitDevice]]):
|
||||
raise ConfigEntryAuthFailed(err) from err
|
||||
except FitbitApiException as err:
|
||||
raise UpdateFailed(err) from err
|
||||
return {device.id: device for device in devices}
|
||||
return {device.id: device for device in devices if device.id is not None}
|
||||
|
||||
|
||||
@dataclass
|
||||
|
||||
@@ -6,6 +6,6 @@
|
||||
"dependencies": ["application_credentials", "http"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/fitbit",
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["fitbit"],
|
||||
"requirements": ["fitbit==0.3.1"]
|
||||
"loggers": ["fitbit", "fitbit_web_api"],
|
||||
"requirements": ["fitbit==0.3.1", "fitbit-web-api==2.13.5"]
|
||||
}
|
||||
|
||||
@@ -21,26 +21,6 @@ class FitbitProfile:
|
||||
"""The locale defined in the user's Fitbit account settings."""
|
||||
|
||||
|
||||
@dataclass
|
||||
class FitbitDevice:
|
||||
"""Device from the Fitbit API response."""
|
||||
|
||||
id: str
|
||||
"""The device ID."""
|
||||
|
||||
device_version: str
|
||||
"""The product name of the device."""
|
||||
|
||||
battery_level: int
|
||||
"""The battery level as a percentage."""
|
||||
|
||||
battery: str
|
||||
"""Returns the battery level of the device."""
|
||||
|
||||
type: str
|
||||
"""The type of the device such as TRACKER or SCALE."""
|
||||
|
||||
|
||||
@dataclass
|
||||
class FitbitConfig:
|
||||
"""Information from the fitbit ConfigEntry data."""
|
||||
|
||||
@@ -8,6 +8,8 @@ import datetime
|
||||
import logging
|
||||
from typing import Any, Final, cast
|
||||
|
||||
from fitbit_web_api.models.device import Device
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
@@ -32,7 +34,7 @@ from .api import FitbitApi
|
||||
from .const import ATTRIBUTION, BATTERY_LEVELS, DOMAIN, FitbitScope, FitbitUnitSystem
|
||||
from .coordinator import FitbitConfigEntry, FitbitDeviceCoordinator
|
||||
from .exceptions import FitbitApiException, FitbitAuthException
|
||||
from .model import FitbitDevice, config_from_entry_data
|
||||
from .model import config_from_entry_data
|
||||
|
||||
_LOGGER: Final = logging.getLogger(__name__)
|
||||
|
||||
@@ -657,7 +659,7 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
|
||||
coordinator: FitbitDeviceCoordinator,
|
||||
user_profile_id: str,
|
||||
description: FitbitSensorEntityDescription,
|
||||
device: FitbitDevice,
|
||||
device: Device,
|
||||
enable_default_override: bool,
|
||||
) -> None:
|
||||
"""Initialize the Fitbit sensor."""
|
||||
@@ -677,7 +679,9 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
|
||||
@property
|
||||
def icon(self) -> str | None:
|
||||
"""Icon to use in the frontend, if any."""
|
||||
if battery_level := BATTERY_LEVELS.get(self.device.battery):
|
||||
if self.device.battery is not None and (
|
||||
battery_level := BATTERY_LEVELS.get(self.device.battery)
|
||||
):
|
||||
return icon_for_battery_level(battery_level=battery_level)
|
||||
return self.entity_description.icon
|
||||
|
||||
@@ -697,7 +701,7 @@ class FitbitBatterySensor(CoordinatorEntity[FitbitDeviceCoordinator], SensorEnti
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self.device = self.coordinator.data[self.device.id]
|
||||
self.device = self.coordinator.data[cast(str, self.device.id)]
|
||||
self._attr_native_value = self.device.battery
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -715,7 +719,7 @@ class FitbitBatteryLevelSensor(
|
||||
coordinator: FitbitDeviceCoordinator,
|
||||
user_profile_id: str,
|
||||
description: FitbitSensorEntityDescription,
|
||||
device: FitbitDevice,
|
||||
device: Device,
|
||||
) -> None:
|
||||
"""Initialize the Fitbit sensor."""
|
||||
super().__init__(coordinator)
|
||||
@@ -736,6 +740,6 @@ class FitbitBatteryLevelSensor(
|
||||
@callback
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self.device = self.coordinator.data[self.device.id]
|
||||
self.device = self.coordinator.data[cast(str, self.device.id)]
|
||||
self._attr_native_value = self.device.battery_level
|
||||
self.async_write_ha_state()
|
||||
|
||||
@@ -23,5 +23,5 @@
|
||||
"winter_mode": {}
|
||||
},
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20251105.1"]
|
||||
"requirements": ["home-assistant-frontend==20251127.0"]
|
||||
}
|
||||
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["google_air_quality_api"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["google_air_quality_api==1.1.1"]
|
||||
"requirements": ["google_air_quality_api==1.1.2"]
|
||||
}
|
||||
|
||||
@@ -132,7 +132,6 @@
|
||||
"heavily_polluted": "Heavily polluted",
|
||||
"heavy_air_pollution": "Heavy air pollution",
|
||||
"high_air_pollution": "High air pollution",
|
||||
"high_air_quality": "High air pollution",
|
||||
"high_health_risk": "High health risk",
|
||||
"horrible_air_quality": "Horrible air quality",
|
||||
"light_air_pollution": "Light air pollution",
|
||||
@@ -165,20 +164,18 @@
|
||||
"slightly_polluted": "Slightly polluted",
|
||||
"sufficient_air_quality": "Sufficient air quality",
|
||||
"unfavorable_air_quality": "Unfavorable air quality",
|
||||
"unfavorable_sensitive": "Unfavorable air quality for sensitive groups",
|
||||
"unfavorable_air_quality_for_sensitive_groups": "Unfavorable air quality for sensitive groups",
|
||||
"unhealthy_air_quality": "Unhealthy air quality",
|
||||
"unhealthy_sensitive": "Unhealthy air quality for sensitive groups",
|
||||
"unsatisfactory_air_quality": "Unsatisfactory air quality",
|
||||
"very_bad_air_quality": "Very bad air quality",
|
||||
"very_good_air_quality": "Very good air quality",
|
||||
"very_high_air_pollution": "Very high air pollution",
|
||||
"very_high_air_quality": "Very High air pollution",
|
||||
"very_high_health_risk": "Very high health risk",
|
||||
"very_low_air_pollution": "Very low air pollution",
|
||||
"very_polluted": "Very polluted",
|
||||
"very_poor_air_quality": "Very poor air quality",
|
||||
"very_unfavorable_air_quality": "Very unfavorable air quality",
|
||||
"very_unhealthy": "Very unhealthy air quality",
|
||||
"very_unhealthy_air_quality": "Very unhealthy air quality",
|
||||
"warning_air_pollution": "Warning level air pollution"
|
||||
}
|
||||
|
||||
@@ -53,7 +53,7 @@ from homeassistant.helpers.issue_registry import (
|
||||
async_create_issue,
|
||||
async_delete_issue,
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import CONF_IGNORE_NON_NUMERIC, DOMAIN
|
||||
from .entity import GroupEntity
|
||||
@@ -374,7 +374,7 @@ class SensorGroup(GroupEntity, SensorEntity):
|
||||
def async_update_group_state(self) -> None:
|
||||
"""Query all members and determine the sensor group state."""
|
||||
self.calculate_state_attributes(self._get_valid_entities())
|
||||
states: list[StateType] = []
|
||||
states: list[str] = []
|
||||
valid_units = self._valid_units
|
||||
valid_states: list[bool] = []
|
||||
sensor_values: list[tuple[str, float, State]] = []
|
||||
|
||||
@@ -17,7 +17,7 @@ from aiohasupervisor.models import GreenOptions, YellowOptions # noqa: F401
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.auth.const import GROUP_ID_ADMIN
|
||||
from homeassistant.components import panel_custom
|
||||
from homeassistant.components import frontend, panel_custom
|
||||
from homeassistant.components.homeassistant import async_set_stop_handler
|
||||
from homeassistant.components.http import StaticPathConfig
|
||||
from homeassistant.config_entries import SOURCE_SYSTEM, ConfigEntry
|
||||
@@ -329,6 +329,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # noqa:
|
||||
return False
|
||||
|
||||
async_load_websocket_api(hass)
|
||||
frontend.async_register_built_in_panel(hass, "app")
|
||||
|
||||
host = os.environ["SUPERVISOR"]
|
||||
websession = async_get_clientsession(hass)
|
||||
|
||||
@@ -211,7 +211,7 @@ async def ws_start_preview(
|
||||
|
||||
@callback
|
||||
def async_preview_updated(
|
||||
last_exception: Exception | None, state: str, attributes: Mapping[str, Any]
|
||||
last_exception: BaseException | None, state: str, attributes: Mapping[str, Any]
|
||||
) -> None:
|
||||
"""Forward config entry state events to websocket."""
|
||||
if last_exception:
|
||||
|
||||
@@ -241,7 +241,9 @@ class HistoryStatsSensor(HistoryStatsSensorBase):
|
||||
|
||||
async def async_start_preview(
|
||||
self,
|
||||
preview_callback: Callable[[Exception | None, str, Mapping[str, Any]], None],
|
||||
preview_callback: Callable[
|
||||
[BaseException | None, str, Mapping[str, Any]], None
|
||||
],
|
||||
) -> CALLBACK_TYPE:
|
||||
"""Render a preview."""
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ from random import random
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.labs import async_is_preview_feature_enabled, async_listen
|
||||
from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance
|
||||
from homeassistant.components.recorder.models import (
|
||||
StatisticData,
|
||||
@@ -30,7 +31,7 @@ from homeassistant.const import (
|
||||
UnitOfTemperature,
|
||||
UnitOfVolume,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, callback
|
||||
from homeassistant.core import HomeAssistant, ServiceCall, ServiceResponse, callback
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.device_registry import DeviceEntry
|
||||
from homeassistant.helpers.issue_registry import (
|
||||
@@ -38,7 +39,6 @@ from homeassistant.helpers.issue_registry import (
|
||||
async_create_issue,
|
||||
async_delete_issue,
|
||||
)
|
||||
from homeassistant.helpers.labs import async_is_preview_feature_enabled, async_listen
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.util import dt as dt_util
|
||||
from homeassistant.util.unit_conversion import (
|
||||
@@ -81,11 +81,22 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
)
|
||||
|
||||
@callback
|
||||
def service_handler(call: ServiceCall | None = None) -> None:
|
||||
def service_handler(call: ServiceCall | None = None) -> ServiceResponse:
|
||||
"""Do nothing."""
|
||||
return None
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, "test_service_1", service_handler, SCHEMA_SERVICE_TEST_SERVICE_1
|
||||
DOMAIN,
|
||||
"test_service_1",
|
||||
service_handler,
|
||||
SCHEMA_SERVICE_TEST_SERVICE_1,
|
||||
description_placeholders={
|
||||
"meep_1": "foo",
|
||||
"meep_2": "bar",
|
||||
"meep_3": "beer",
|
||||
"meep_4": "milk",
|
||||
"meep_5": "https://example.com",
|
||||
},
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
@@ -117,14 +117,16 @@
|
||||
},
|
||||
"services": {
|
||||
"test_service_1": {
|
||||
"description": "Fake action for testing",
|
||||
"description": "Fake action for testing {meep_2}",
|
||||
"fields": {
|
||||
"field_1": {
|
||||
"description": "Number of seconds",
|
||||
"name": "Field 1"
|
||||
"description": "Number of seconds {meep_4}",
|
||||
"example": "Example: {meep_5}",
|
||||
"name": "Field 1 {meep_3}"
|
||||
},
|
||||
"field_2": {
|
||||
"description": "Mode",
|
||||
"example": "Field 2 example",
|
||||
"name": "Field 2"
|
||||
},
|
||||
"field_3": {
|
||||
@@ -136,7 +138,7 @@
|
||||
"name": "Field 4"
|
||||
}
|
||||
},
|
||||
"name": "Test action 1",
|
||||
"name": "Test action {meep_1}",
|
||||
"sections": {
|
||||
"advanced_fields": {
|
||||
"description": "Some very advanced things",
|
||||
|
||||
@@ -39,6 +39,10 @@ if TYPE_CHECKING:
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_DESCRIPTION_PLACEHOLDERS = {
|
||||
"sensor_value_types_url": "https://www.home-assistant.io/integrations/knx/#value-types"
|
||||
}
|
||||
|
||||
|
||||
@callback
|
||||
def async_setup_services(hass: HomeAssistant) -> None:
|
||||
@@ -48,6 +52,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
SERVICE_KNX_SEND,
|
||||
service_send_to_knx_bus,
|
||||
schema=SERVICE_KNX_SEND_SCHEMA,
|
||||
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
|
||||
)
|
||||
|
||||
hass.services.async_register(
|
||||
@@ -63,6 +68,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
SERVICE_KNX_EVENT_REGISTER,
|
||||
service_event_register_modify,
|
||||
schema=SERVICE_KNX_EVENT_REGISTER_SCHEMA,
|
||||
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
|
||||
)
|
||||
|
||||
async_register_admin_service(
|
||||
@@ -71,6 +77,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
SERVICE_KNX_EXPOSURE_REGISTER,
|
||||
service_exposure_register_modify,
|
||||
schema=SERVICE_KNX_EXPOSURE_REGISTER_SCHEMA,
|
||||
description_placeholders=_DESCRIPTION_PLACEHOLDERS,
|
||||
)
|
||||
|
||||
async_register_admin_service(
|
||||
|
||||
@@ -674,7 +674,7 @@
|
||||
"name": "Remove event registration"
|
||||
},
|
||||
"type": {
|
||||
"description": "If set, the payload will be decoded as given DPT in the event data `value` key. KNX sensor types are valid values (see https://www.home-assistant.io/integrations/knx/#value-types).",
|
||||
"description": "If set, the payload will be decoded as given DPT in the event data `value` key. KNX sensor types are valid values (see {sensor_value_types_url}).",
|
||||
"name": "Value type"
|
||||
}
|
||||
},
|
||||
@@ -704,7 +704,7 @@
|
||||
"name": "Remove exposure"
|
||||
},
|
||||
"type": {
|
||||
"description": "Telegrams will be encoded as given DPT. 'binary' and all KNX sensor types are valid values (see https://www.home-assistant.io/integrations/knx/#value-types).",
|
||||
"description": "Telegrams will be encoded as given DPT. 'binary' and all KNX sensor types are valid values (see {sensor_value_types_url}).",
|
||||
"name": "Value type"
|
||||
}
|
||||
},
|
||||
@@ -740,7 +740,7 @@
|
||||
"name": "Send as Response"
|
||||
},
|
||||
"type": {
|
||||
"description": "If set, the payload will not be sent as raw bytes, but encoded as given DPT. KNX sensor types are valid values (see https://www.home-assistant.io/integrations/knx/#value-types).",
|
||||
"description": "If set, the payload will not be sent as raw bytes, but encoded as given DPT. KNX sensor types are valid values (see {sensor_value_types_url}).",
|
||||
"name": "Value type"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -7,24 +7,36 @@ in the Home Assistant Labs UI for users to enable or disable.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
import logging
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.core import Event, HomeAssistant, callback
|
||||
from homeassistant.generated.labs import LABS_PREVIEW_FEATURES
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.storage import Store
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import async_get_custom_components
|
||||
|
||||
from .const import DOMAIN, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
|
||||
from .models import LabPreviewFeature, LabsData, LabsStoreData, NativeLabsStoreData
|
||||
from .const import DOMAIN, EVENT_LABS_UPDATED, LABS_DATA, STORAGE_KEY, STORAGE_VERSION
|
||||
from .models import (
|
||||
EventLabsUpdatedData,
|
||||
LabPreviewFeature,
|
||||
LabsData,
|
||||
LabsStoreData,
|
||||
NativeLabsStoreData,
|
||||
)
|
||||
from .websocket_api import async_setup as async_setup_ws_api
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
|
||||
__all__ = []
|
||||
__all__ = [
|
||||
"EVENT_LABS_UPDATED",
|
||||
"EventLabsUpdatedData",
|
||||
"async_is_preview_feature_enabled",
|
||||
"async_listen",
|
||||
]
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
@@ -122,3 +134,55 @@ async def _async_scan_all_preview_features(
|
||||
|
||||
_LOGGER.debug("Loaded %d total lab preview features", len(preview_features))
|
||||
return preview_features
|
||||
|
||||
|
||||
@callback
|
||||
def async_is_preview_feature_enabled(
|
||||
hass: HomeAssistant, domain: str, preview_feature: str
|
||||
) -> bool:
|
||||
"""Check if a lab preview feature is enabled.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
|
||||
Returns:
|
||||
True if the preview feature is enabled, False otherwise
|
||||
"""
|
||||
if LABS_DATA not in hass.data:
|
||||
return False
|
||||
|
||||
labs_data = hass.data[LABS_DATA]
|
||||
return (domain, preview_feature) in labs_data.data.preview_feature_status
|
||||
|
||||
|
||||
@callback
|
||||
def async_listen(
|
||||
hass: HomeAssistant,
|
||||
domain: str,
|
||||
preview_feature: str,
|
||||
listener: Callable[[], None],
|
||||
) -> Callable[[], None]:
|
||||
"""Listen for changes to a specific preview feature.
|
||||
|
||||
Args:
|
||||
hass: HomeAssistant instance
|
||||
domain: Integration domain
|
||||
preview_feature: Preview feature name
|
||||
listener: Callback to invoke when the preview feature is toggled
|
||||
|
||||
Returns:
|
||||
Callable to unsubscribe from the listener
|
||||
"""
|
||||
|
||||
@callback
|
||||
def _async_feature_updated(event: Event[EventLabsUpdatedData]) -> None:
|
||||
"""Handle labs feature update event."""
|
||||
if (
|
||||
event.data["domain"] == domain
|
||||
and event.data["preview_feature"] == preview_feature
|
||||
):
|
||||
listener()
|
||||
|
||||
return hass.bus.async_listen(EVENT_LABS_UPDATED, _async_feature_updated)
|
||||
|
||||
@@ -11,4 +11,6 @@ DOMAIN = "labs"
|
||||
STORAGE_KEY = "core.labs"
|
||||
STORAGE_VERSION = 1
|
||||
|
||||
EVENT_LABS_UPDATED = "labs_updated"
|
||||
|
||||
LABS_DATA: HassKey[LabsData] = HassKey(DOMAIN)
|
||||
|
||||
@@ -9,6 +9,14 @@ if TYPE_CHECKING:
|
||||
from homeassistant.helpers.storage import Store
|
||||
|
||||
|
||||
class EventLabsUpdatedData(TypedDict):
|
||||
"""Event data for labs_updated event."""
|
||||
|
||||
domain: str
|
||||
preview_feature: str
|
||||
enabled: bool
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True, slots=True)
|
||||
class LabPreviewFeature:
|
||||
"""Lab preview feature definition."""
|
||||
|
||||
@@ -9,9 +9,9 @@ import voluptuous as vol
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components.backup import async_get_manager
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.labs import EVENT_LABS_UPDATED, EventLabsUpdatedData
|
||||
|
||||
from .const import LABS_DATA
|
||||
from .const import EVENT_LABS_UPDATED, LABS_DATA
|
||||
from .models import EventLabsUpdatedData
|
||||
|
||||
|
||||
@callback
|
||||
|
||||
@@ -181,6 +181,16 @@ class LoggerSettings:
|
||||
"""Save settings."""
|
||||
self._store.async_delay_save(self._async_data_to_save, delay)
|
||||
|
||||
@callback
|
||||
def async_get_integration_domains(self) -> set[str]:
|
||||
"""Get domains that have integration-level log settings."""
|
||||
stored_log_config = self._stored_config[STORAGE_LOG_KEY]
|
||||
return {
|
||||
domain
|
||||
for domain, setting in stored_log_config.items()
|
||||
if setting.type == LogSettingsType.INTEGRATION
|
||||
}
|
||||
|
||||
@callback
|
||||
def _async_get_logger_logs(self) -> dict[str, int]:
|
||||
"""Get the logger logs."""
|
||||
|
||||
@@ -6,6 +6,7 @@ import voluptuous as vol
|
||||
|
||||
from homeassistant.components import websocket_api
|
||||
from homeassistant.components.websocket_api import ActiveConnection
|
||||
from homeassistant.config_entries import DISCOVERY_SOURCES
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.loader import IntegrationNotFound, async_get_integration
|
||||
from homeassistant.setup import async_get_loaded_integrations
|
||||
@@ -34,6 +35,16 @@ def handle_integration_log_info(
|
||||
hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
|
||||
) -> None:
|
||||
"""Handle integrations logger info."""
|
||||
integrations = set(async_get_loaded_integrations(hass))
|
||||
|
||||
# Add discovered config flows that are not yet loaded
|
||||
for flow in hass.config_entries.flow.async_progress():
|
||||
if flow["context"].get("source") in DISCOVERY_SOURCES:
|
||||
integrations.add(flow["handler"])
|
||||
|
||||
# Add integrations with custom log settings
|
||||
integrations.update(hass.data[DATA_LOGGER].settings.async_get_integration_domains())
|
||||
|
||||
connection.send_result(
|
||||
msg["id"],
|
||||
[
|
||||
@@ -43,7 +54,7 @@ def handle_integration_log_info(
|
||||
f"homeassistant.components.{integration}"
|
||||
).getEffectiveLevel(),
|
||||
}
|
||||
for integration in async_get_loaded_integrations(hass)
|
||||
for integration in integrations
|
||||
],
|
||||
)
|
||||
|
||||
|
||||
@@ -485,4 +485,18 @@ DISCOVERY_SCHEMAS = [
|
||||
required_attributes=(clusters.RefrigeratorAlarm.Attributes.State,),
|
||||
allow_multi=True,
|
||||
),
|
||||
MatterDiscoverySchema(
|
||||
platform=Platform.BINARY_SENSOR,
|
||||
entity_description=MatterBinarySensorEntityDescription(
|
||||
key="WindowCoveringConfigStatusOperational",
|
||||
device_class=BinarySensorDeviceClass.PROBLEM,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
# unset Operational bit from ConfigStatus bitmap means problem
|
||||
device_to_ha=lambda x: not bool(
|
||||
x & clusters.WindowCovering.Bitmaps.ConfigStatus.kOperational
|
||||
),
|
||||
),
|
||||
entity_class=MatterBinarySensor,
|
||||
required_attributes=(clusters.WindowCovering.Attributes.ConfigStatus,),
|
||||
),
|
||||
]
|
||||
|
||||
@@ -1486,6 +1486,7 @@ class MqttEntity(
|
||||
entity_registry.async_update_entity(
|
||||
self.entity_id, new_entity_id=self._update_registry_entity_id
|
||||
)
|
||||
self._update_registry_entity_id = None
|
||||
|
||||
await super().async_added_to_hass()
|
||||
self._subscriptions = {}
|
||||
|
||||
@@ -729,8 +729,8 @@
|
||||
"data_description": {
|
||||
"payload_reset_percentage": "A special payload that resets the fan speed percentage state attribute to unknown when received at the percentage state topic.",
|
||||
"percentage_command_template": "A [template]({command_templating_url}) to compose the payload to be published at the percentage command topic.",
|
||||
"percentage_command_topic": "The MQTT topic to publish commands to change the fan speed state based on a percentage. [Learn more.]({url}#percentage_command_topic)",
|
||||
"percentage_state_topic": "The MQTT topic subscribed to receive fan speed based on percentage. [Learn more.]({url}#percentage_state_topic)",
|
||||
"percentage_command_topic": "The MQTT topic to publish commands to change the fan speed state based on a percentage setting. The value shall be in the range from \"speed range min\" to \"speed range max\". [Learn more.]({url}#percentage_command_topic)",
|
||||
"percentage_state_topic": "The MQTT topic subscribed to receive fan speed state. This is a value in the range from \"speed range min\" to \"speed range max\". [Learn more.]({url}#percentage_state_topic)",
|
||||
"percentage_value_template": "Defines a [template]({value_templating_url}) to extract the speed percentage value.",
|
||||
"speed_range_max": "The maximum of numeric output range (representing 100 %). The percentage step is 100 / number of speeds within the \"speed range\".",
|
||||
"speed_range_min": "The minimum of numeric output range (off not included, so speed_range_min - 1 represents 0 %). The percentage step is 100 / the number of speeds within the \"speed range\"."
|
||||
|
||||
@@ -8,16 +8,25 @@ from dataclasses import dataclass, field
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from music_assistant_client import MusicAssistantClient
|
||||
from music_assistant_client.exceptions import CannotConnect, InvalidServerVersion
|
||||
from music_assistant_client.exceptions import (
|
||||
CannotConnect,
|
||||
InvalidServerVersion,
|
||||
MusicAssistantClientException,
|
||||
)
|
||||
from music_assistant_models.config_entries import PlayerConfig
|
||||
from music_assistant_models.enums import EventType
|
||||
from music_assistant_models.errors import ActionUnavailable, MusicAssistantError
|
||||
from music_assistant_models.errors import (
|
||||
ActionUnavailable,
|
||||
AuthenticationFailed,
|
||||
InvalidToken,
|
||||
MusicAssistantError,
|
||||
)
|
||||
from music_assistant_models.player import Player
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
|
||||
from homeassistant.const import CONF_URL, EVENT_HOMEASSISTANT_STOP, Platform
|
||||
from homeassistant.core import Event, HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryNotReady
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
|
||||
from homeassistant.helpers import config_validation as cv, device_registry as dr
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.issue_registry import (
|
||||
@@ -26,7 +35,7 @@ from homeassistant.helpers.issue_registry import (
|
||||
async_delete_issue,
|
||||
)
|
||||
|
||||
from .const import ATTR_CONF_EXPOSE_PLAYER_TO_HA, DOMAIN, LOGGER
|
||||
from .const import ATTR_CONF_EXPOSE_PLAYER_TO_HA, CONF_TOKEN, DOMAIN, LOGGER
|
||||
from .helpers import get_music_assistant_client
|
||||
from .services import register_actions
|
||||
|
||||
@@ -59,6 +68,7 @@ class MusicAssistantEntryData:
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Set up the Music Assistant component."""
|
||||
register_actions(hass)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
@@ -68,7 +78,9 @@ async def async_setup_entry( # noqa: C901
|
||||
"""Set up Music Assistant from a config entry."""
|
||||
http_session = async_get_clientsession(hass, verify_ssl=False)
|
||||
mass_url = entry.data[CONF_URL]
|
||||
mass = MusicAssistantClient(mass_url, http_session)
|
||||
# Get token from config entry (for schema >= AUTH_SCHEMA_VERSION)
|
||||
token = entry.data.get(CONF_TOKEN)
|
||||
mass = MusicAssistantClient(mass_url, http_session, token=token)
|
||||
|
||||
try:
|
||||
async with asyncio.timeout(CONNECT_TIMEOUT):
|
||||
@@ -87,6 +99,14 @@ async def async_setup_entry( # noqa: C901
|
||||
translation_key="invalid_server_version",
|
||||
)
|
||||
raise ConfigEntryNotReady(f"Invalid server version: {err}") from err
|
||||
except (AuthenticationFailed, InvalidToken) as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
f"Authentication failed for {mass_url}: {err}"
|
||||
) from err
|
||||
except MusicAssistantClientException as err:
|
||||
raise ConfigEntryNotReady(
|
||||
f"Failed to connect to music assistant server {mass_url}: {err}"
|
||||
) from err
|
||||
except MusicAssistantError as err:
|
||||
LOGGER.exception("Failed to connect to music assistant server", exc_info=err)
|
||||
raise ConfigEntryNotReady(
|
||||
|
||||
@@ -2,40 +2,79 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Mapping
|
||||
from typing import TYPE_CHECKING, Any
|
||||
from urllib.parse import urlencode
|
||||
|
||||
from music_assistant_client import MusicAssistantClient
|
||||
from music_assistant_client.auth_helpers import create_long_lived_token, get_server_info
|
||||
from music_assistant_client.exceptions import (
|
||||
CannotConnect,
|
||||
InvalidServerVersion,
|
||||
MusicAssistantClientException,
|
||||
)
|
||||
from music_assistant_models.api import ServerInfoMessage
|
||||
from music_assistant_models.errors import AuthenticationFailed, InvalidToken
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.config_entries import SOURCE_REAUTH, ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_URL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import aiohttp_client
|
||||
from homeassistant.helpers.config_entry_oauth2_flow import (
|
||||
_encode_jwt,
|
||||
async_get_redirect_uri,
|
||||
)
|
||||
from homeassistant.helpers.service_info.hassio import HassioServiceInfo
|
||||
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
|
||||
|
||||
from .const import DOMAIN, LOGGER
|
||||
from .const import (
|
||||
AUTH_SCHEMA_VERSION,
|
||||
CONF_TOKEN,
|
||||
DOMAIN,
|
||||
HASSIO_DISCOVERY_SCHEMA_VERSION,
|
||||
LOGGER,
|
||||
)
|
||||
|
||||
DEFAULT_TITLE = "Music Assistant"
|
||||
DEFAULT_URL = "http://mass.local:8095"
|
||||
|
||||
|
||||
STEP_USER_SCHEMA = vol.Schema({vol.Required(CONF_URL): str})
|
||||
STEP_AUTH_TOKEN_SCHEMA = vol.Schema({vol.Required(CONF_TOKEN): str})
|
||||
|
||||
|
||||
def _parse_zeroconf_server_info(properties: dict[str, str]) -> ServerInfoMessage:
|
||||
"""Parse zeroconf properties to ServerInfoMessage."""
|
||||
|
||||
return ServerInfoMessage(
|
||||
server_id=properties["server_id"],
|
||||
server_version=properties["server_version"],
|
||||
schema_version=int(properties["schema_version"]),
|
||||
min_supported_schema_version=int(properties["min_supported_schema_version"]),
|
||||
base_url=properties["base_url"],
|
||||
homeassistant_addon=properties["homeassistant_addon"].lower() == "true",
|
||||
onboard_done=properties["onboard_done"].lower() == "true",
|
||||
)
|
||||
|
||||
|
||||
async def _get_server_info(hass: HomeAssistant, url: str) -> ServerInfoMessage:
|
||||
"""Validate the user input allows us to connect."""
|
||||
"""Get MA server info for the given URL."""
|
||||
session = aiohttp_client.async_get_clientsession(hass)
|
||||
return await get_server_info(server_url=url, aiohttp_session=session)
|
||||
|
||||
|
||||
async def _test_connection(hass: HomeAssistant, url: str, token: str) -> None:
|
||||
"""Test connection to MA server with given URL and token."""
|
||||
session = aiohttp_client.async_get_clientsession(hass)
|
||||
async with MusicAssistantClient(
|
||||
url, aiohttp_client.async_get_clientsession(hass)
|
||||
server_url=url,
|
||||
aiohttp_session=session,
|
||||
token=token,
|
||||
) as client:
|
||||
if TYPE_CHECKING:
|
||||
assert client.server_info is not None
|
||||
return client.server_info
|
||||
# Just executing any command to test the connection.
|
||||
# If auth is required and the token is invalid, this will raise.
|
||||
await client.send_command("info")
|
||||
|
||||
|
||||
class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
@@ -46,16 +85,18 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
def __init__(self) -> None:
|
||||
"""Set up flow instance."""
|
||||
self.url: str | None = None
|
||||
self.token: str | None = None
|
||||
self.server_info: ServerInfoMessage | None = None
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a manual configuration."""
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
self.url = user_input[CONF_URL]
|
||||
try:
|
||||
server_info = await _get_server_info(self.hass, user_input[CONF_URL])
|
||||
server_info = await _get_server_info(self.hass, self.url)
|
||||
except CannotConnect:
|
||||
errors["base"] = "cannot_connect"
|
||||
except InvalidServerVersion:
|
||||
@@ -64,16 +105,21 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
else:
|
||||
self.server_info = server_info
|
||||
await self.async_set_unique_id(
|
||||
server_info.server_id, raise_on_progress=False
|
||||
)
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={CONF_URL: user_input[CONF_URL]}
|
||||
)
|
||||
self._abort_if_unique_id_configured(updates={CONF_URL: self.url})
|
||||
|
||||
# Check if authentication is required for this server
|
||||
if server_info.schema_version >= AUTH_SCHEMA_VERSION:
|
||||
# Redirect to browser-based authentication
|
||||
return await self.async_step_auth()
|
||||
|
||||
# Old server, no auth needed
|
||||
return self.async_create_entry(
|
||||
title=DEFAULT_TITLE,
|
||||
data={CONF_URL: user_input[CONF_URL]},
|
||||
data={CONF_URL: self.url},
|
||||
)
|
||||
|
||||
suggested_values = user_input
|
||||
@@ -88,16 +134,87 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_hassio(
|
||||
self, discovery_info: HassioServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle Home Assistant add-on discovery.
|
||||
|
||||
This flow is triggered by the Music Assistant add-on.
|
||||
"""
|
||||
# Build URL from add-on discovery info
|
||||
# The add-on exposes the API on port 8095, but also hosts an internal-only
|
||||
# webserver (default at port 8094) for the Home Assistant integration to connect to.
|
||||
# The info where the internal API is exposed is passed via discovery_info
|
||||
host = discovery_info.config["host"]
|
||||
port = discovery_info.config["port"]
|
||||
self.url = f"http://{host}:{port}"
|
||||
try:
|
||||
server_info = await _get_server_info(self.hass, self.url)
|
||||
except CannotConnect:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except InvalidServerVersion:
|
||||
return self.async_abort(reason="invalid_server_version")
|
||||
except MusicAssistantClientException:
|
||||
LOGGER.exception("Unexpected exception during add-on discovery")
|
||||
return self.async_abort(reason="unknown")
|
||||
|
||||
if not server_info.onboard_done:
|
||||
return self.async_abort(reason="server_not_ready")
|
||||
|
||||
# We trust the token from hassio discovery and validate it during setup
|
||||
self.token = discovery_info.config["auth_token"]
|
||||
|
||||
self.server_info = server_info
|
||||
await self.async_set_unique_id(server_info.server_id)
|
||||
self._abort_if_unique_id_configured(
|
||||
updates={CONF_URL: self.url, CONF_TOKEN: self.token}
|
||||
)
|
||||
|
||||
return await self.async_step_hassio_confirm()
|
||||
|
||||
async def async_step_hassio_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm the add-on discovery."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.url is not None
|
||||
|
||||
if user_input is not None:
|
||||
data = {CONF_URL: self.url}
|
||||
if self.token:
|
||||
data[CONF_TOKEN] = self.token
|
||||
return self.async_create_entry(
|
||||
title=DEFAULT_TITLE,
|
||||
data=data,
|
||||
)
|
||||
|
||||
self._set_confirm_only()
|
||||
return self.async_show_form(step_id="hassio_confirm")
|
||||
|
||||
async def async_step_zeroconf(
|
||||
self, discovery_info: ZeroconfServiceInfo
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle a zeroconf discovery for a Music Assistant server."""
|
||||
try:
|
||||
server_info = ServerInfoMessage.from_dict(discovery_info.properties)
|
||||
except LookupError:
|
||||
# Parse zeroconf properties (strings) to ServerInfoMessage
|
||||
server_info = _parse_zeroconf_server_info(discovery_info.properties)
|
||||
except (LookupError, KeyError, ValueError):
|
||||
return self.async_abort(reason="invalid_discovery_info")
|
||||
|
||||
if server_info.schema_version >= HASSIO_DISCOVERY_SCHEMA_VERSION:
|
||||
# Ignore servers running as Home Assistant add-on
|
||||
# (they should be discovered through hassio discovery instead)
|
||||
if server_info.homeassistant_addon:
|
||||
LOGGER.debug("Ignoring add-on server in zeroconf discovery")
|
||||
return self.async_abort(reason="already_discovered_addon")
|
||||
|
||||
# Ignore servers that have not completed onboarding yet
|
||||
if not server_info.onboard_done:
|
||||
LOGGER.debug("Ignoring server that hasn't completed onboarding")
|
||||
return self.async_abort(reason="server_not_ready")
|
||||
|
||||
self.url = server_info.base_url
|
||||
self.server_info = server_info
|
||||
|
||||
await self.async_set_unique_id(server_info.server_id)
|
||||
self._abort_if_unique_id_configured(updates={CONF_URL: self.url})
|
||||
@@ -115,8 +232,15 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle user-confirmation of discovered server."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.url is not None
|
||||
assert self.server_info is not None
|
||||
|
||||
if user_input is not None:
|
||||
# Check if authentication is required for this server
|
||||
if self.server_info.schema_version >= AUTH_SCHEMA_VERSION:
|
||||
# Redirect to browser-based authentication
|
||||
return await self.async_step_auth()
|
||||
|
||||
# Old server, no auth needed
|
||||
return self.async_create_entry(
|
||||
title=DEFAULT_TITLE,
|
||||
data={CONF_URL: self.url},
|
||||
@@ -127,3 +251,152 @@ class MusicAssistantConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
step_id="discovery_confirm",
|
||||
description_placeholders={"url": self.url},
|
||||
)
|
||||
|
||||
async def async_step_auth(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle authentication via redirect to MA login."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.url is not None
|
||||
|
||||
# Check if we're returning from the external auth step with a token
|
||||
if user_input is not None:
|
||||
if "error" in user_input:
|
||||
return self.async_abort(reason="auth_error")
|
||||
# OAuth2 callback sends token as "code" parameter
|
||||
if "code" in user_input:
|
||||
self.token = user_input["code"]
|
||||
return self.async_external_step_done(next_step_id="finish_auth")
|
||||
|
||||
# Check if we can use external auth (redirect flow)
|
||||
try:
|
||||
redirect_uri = async_get_redirect_uri(self.hass)
|
||||
except RuntimeError:
|
||||
# No current request context or missing required headers
|
||||
return await self.async_step_auth_manual()
|
||||
|
||||
# Use OAuth2 callback URL with JWT-encoded state
|
||||
state = _encode_jwt(
|
||||
self.hass, {"flow_id": self.flow_id, "redirect_uri": redirect_uri}
|
||||
)
|
||||
# Music Assistant server will redirect to: {redirect_uri}?state={state}&code={token}
|
||||
params = urlencode(
|
||||
{
|
||||
"return_url": f"{redirect_uri}?state={state}",
|
||||
"device_name": "Home Assistant",
|
||||
}
|
||||
)
|
||||
login_url = f"{self.url}/login?{params}"
|
||||
return self.async_external_step(step_id="auth", url=login_url)
|
||||
|
||||
async def async_step_finish_auth(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Finish authentication after receiving token."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.url is not None
|
||||
assert self.token is not None
|
||||
|
||||
# Exchange session token for long-lived token
|
||||
# The login flow gives us a session token (short expiration)
|
||||
session = aiohttp_client.async_get_clientsession(self.hass)
|
||||
|
||||
try:
|
||||
LOGGER.debug("Creating long-lived token")
|
||||
long_lived_token = await create_long_lived_token(
|
||||
self.url,
|
||||
self.token,
|
||||
"Home Assistant",
|
||||
aiohttp_session=session,
|
||||
)
|
||||
LOGGER.debug("Successfully created long-lived token")
|
||||
except (TimeoutError, CannotConnect):
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except (AuthenticationFailed, InvalidToken) as err:
|
||||
LOGGER.error("Authentication failed: %s", err)
|
||||
return self.async_abort(reason="auth_failed")
|
||||
except InvalidServerVersion as err:
|
||||
LOGGER.error("Invalid server version: %s", err)
|
||||
return self.async_abort(reason="invalid_server_version")
|
||||
except MusicAssistantClientException:
|
||||
LOGGER.exception("Unexpected exception during connection test")
|
||||
return self.async_abort(reason="unknown")
|
||||
|
||||
if self.source == SOURCE_REAUTH:
|
||||
reauth_entry = self._get_reauth_entry()
|
||||
return self.async_update_reload_and_abort(
|
||||
reauth_entry,
|
||||
data={CONF_URL: self.url, CONF_TOKEN: long_lived_token},
|
||||
)
|
||||
|
||||
# Connection has been validated by creating a long-lived token
|
||||
return self.async_create_entry(
|
||||
title=DEFAULT_TITLE,
|
||||
data={CONF_URL: self.url, CONF_TOKEN: long_lived_token},
|
||||
)
|
||||
|
||||
async def async_step_auth_manual(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle manual token entry as fallback."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.url is not None
|
||||
|
||||
errors: dict[str, str] = {}
|
||||
|
||||
if user_input is not None:
|
||||
self.token = user_input[CONF_TOKEN]
|
||||
try:
|
||||
# Test the connection with the provided token
|
||||
await _test_connection(self.hass, self.url, self.token)
|
||||
except CannotConnect:
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except InvalidServerVersion:
|
||||
return self.async_abort(reason="invalid_server_version")
|
||||
except (AuthenticationFailed, InvalidToken):
|
||||
errors["base"] = "auth_failed"
|
||||
except MusicAssistantClientException:
|
||||
LOGGER.exception("Unexpected exception during manual auth")
|
||||
return self.async_abort(reason="unknown")
|
||||
else:
|
||||
if self.source == SOURCE_REAUTH:
|
||||
return self.async_update_reload_and_abort(
|
||||
self._get_reauth_entry(),
|
||||
data={CONF_URL: self.url, CONF_TOKEN: self.token},
|
||||
)
|
||||
|
||||
return self.async_create_entry(
|
||||
title=DEFAULT_TITLE,
|
||||
data={CONF_URL: self.url, CONF_TOKEN: self.token},
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="auth_manual",
|
||||
data_schema=vol.Schema({vol.Required(CONF_TOKEN): str}),
|
||||
description_placeholders={"url": self.url},
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle reauth when token is invalid or expired."""
|
||||
self.url = entry_data[CONF_URL]
|
||||
# Show confirmation before redirecting to auth
|
||||
return await self.async_step_reauth_confirm()
|
||||
|
||||
async def async_step_reauth_confirm(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Confirm reauth dialog."""
|
||||
if TYPE_CHECKING:
|
||||
assert self.url is not None
|
||||
|
||||
if user_input is not None:
|
||||
# Redirect to auth flow
|
||||
return await self.async_step_auth()
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="reauth_confirm",
|
||||
description_placeholders={"url": self.url},
|
||||
)
|
||||
|
||||
@@ -7,6 +7,13 @@ DOMAIN_EVENT = f"{DOMAIN}_event"
|
||||
|
||||
DEFAULT_NAME = "Music Assistant"
|
||||
|
||||
# Schema version where mandatory authentication was added to the MA webserver
|
||||
AUTH_SCHEMA_VERSION = 28
|
||||
# Schema version where hassio discovery support was added
|
||||
HASSIO_DISCOVERY_SCHEMA_VERSION = 28
|
||||
|
||||
CONF_TOKEN = "token"
|
||||
|
||||
ATTR_IS_GROUP = "is_group"
|
||||
ATTR_GROUP_MEMBERS = "group_members"
|
||||
ATTR_GROUP_PARENTS = "group_parents"
|
||||
|
||||
@@ -1,12 +1,14 @@
|
||||
{
|
||||
"domain": "music_assistant",
|
||||
"name": "Music Assistant",
|
||||
"after_dependencies": ["media_source", "media_player"],
|
||||
"after_dependencies": ["media_source"],
|
||||
"codeowners": ["@music-assistant", "@arturpragacz"],
|
||||
"config_flow": true,
|
||||
"dependencies": ["auth"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/music_assistant",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["music_assistant"],
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["music-assistant-client==1.3.2"],
|
||||
"zeroconf": ["_mass._tcp.local."]
|
||||
}
|
||||
|
||||
64
homeassistant/components/music_assistant/quality_scale.yaml
Normal file
64
homeassistant/components/music_assistant/quality_scale.yaml
Normal file
@@ -0,0 +1,64 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup: done
|
||||
appropriate-polling:
|
||||
status: exempt
|
||||
comment: Integration is local push
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions: done
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions: todo
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: todo
|
||||
parallel-updates: todo
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: Devices don't require authentication
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: done
|
||||
discovery: done
|
||||
docs-data-update: todo
|
||||
docs-examples: done
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: done
|
||||
dynamic-devices: done
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: done
|
||||
stale-devices: done
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: done
|
||||
@@ -3,20 +3,41 @@
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
|
||||
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"reconfiguration_successful": "Successfully reconfigured the Music Assistant integration.",
|
||||
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
|
||||
},
|
||||
"error": {
|
||||
"auth_error": "Authentication error, please try again",
|
||||
"auth_failed": "Authentication failed, please try again",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_server_version": "The Music Assistant server is not the correct version",
|
||||
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"error": {
|
||||
"auth_failed": "[%key:component::music_assistant::config::abort::auth_failed%]",
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_server_version": "[%key:component::music_assistant::config::abort::invalid_server_version%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"auth_manual": {
|
||||
"data": {
|
||||
"token": "Long-lived access token"
|
||||
},
|
||||
"data_description": {
|
||||
"token": "Create a long-lived access token in your Music Assistant server settings and paste it here"
|
||||
},
|
||||
"title": "Enter long-lived access token"
|
||||
},
|
||||
"discovery_confirm": {
|
||||
"description": "Do you want to add the Music Assistant server `{url}` to Home Assistant?",
|
||||
"title": "Discovered Music Assistant server"
|
||||
},
|
||||
"hassio_confirm": {
|
||||
"description": "Do you want to add the Music Assistant server to Home Assistant?",
|
||||
"title": "Discovered Music Assistant add-on"
|
||||
},
|
||||
"reauth_confirm": {
|
||||
"description": "The authentication token for Music Assistant server `{url}` is no longer valid. Please re-authenticate to continue using the integration.",
|
||||
"title": "Reauthentication required"
|
||||
},
|
||||
"user": {
|
||||
"data": {
|
||||
"url": "[%key:common::config_flow::data::url%]"
|
||||
|
||||
@@ -19,6 +19,7 @@ from google_nest_sdm.exceptions import (
|
||||
ConfigurationException,
|
||||
DecodeException,
|
||||
SubscriberException,
|
||||
SubscriberTimeoutException,
|
||||
)
|
||||
from google_nest_sdm.traits import TraitType
|
||||
import voluptuous as vol
|
||||
@@ -203,10 +204,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: NestConfigEntry) -> bool
|
||||
await auth.async_get_access_token()
|
||||
except ClientResponseError as err:
|
||||
if 400 <= err.status < 500:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
raise ConfigEntryNotReady from err
|
||||
raise ConfigEntryAuthFailed(
|
||||
translation_domain=DOMAIN, translation_key="reauth_required"
|
||||
) from err
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN, translation_key="auth_server_error"
|
||||
) from err
|
||||
except ClientError as err:
|
||||
raise ConfigEntryNotReady from err
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN, translation_key="auth_client_error"
|
||||
) from err
|
||||
|
||||
subscriber = await api.new_subscriber(hass, entry, auth)
|
||||
if not subscriber:
|
||||
@@ -227,19 +234,32 @@ async def async_setup_entry(hass: HomeAssistant, entry: NestConfigEntry) -> bool
|
||||
unsub = await subscriber.start_async()
|
||||
except AuthException as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
f"Subscriber authentication error: {err!s}"
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="reauth_required",
|
||||
) from err
|
||||
except ConfigurationException as err:
|
||||
_LOGGER.error("Configuration error: %s", err)
|
||||
return False
|
||||
except SubscriberTimeoutException as err:
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="subscriber_timeout",
|
||||
) from err
|
||||
except SubscriberException as err:
|
||||
raise ConfigEntryNotReady(f"Subscriber error: {err!s}") from err
|
||||
_LOGGER.error("Subscriber error: %s", err)
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="subscriber_error",
|
||||
) from err
|
||||
|
||||
try:
|
||||
device_manager = await subscriber.async_get_device_manager()
|
||||
except ApiException as err:
|
||||
unsub()
|
||||
raise ConfigEntryNotReady(f"Device manager error: {err!s}") from err
|
||||
raise ConfigEntryNotReady(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="device_api_error",
|
||||
) from err
|
||||
|
||||
@callback
|
||||
def on_hass_stop(_: Event) -> None:
|
||||
|
||||
@@ -23,12 +23,7 @@ rules:
|
||||
entity-unique-id: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: todo
|
||||
test-before-setup:
|
||||
status: todo
|
||||
comment: |
|
||||
The integration does tests on setup, however the most common issues
|
||||
observed are related to ipv6 misconfigurations and the error messages
|
||||
are not self explanatory and can be improved.
|
||||
test-before-setup: done
|
||||
docs-high-level-description: done
|
||||
config-flow-test-coverage: done
|
||||
docs-actions: done
|
||||
|
||||
@@ -131,6 +131,26 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"auth_client_error": {
|
||||
"message": "Client error during authentication, please check your network connection."
|
||||
},
|
||||
"auth_server_error": {
|
||||
"message": "Error response from authentication server, please see logs for details."
|
||||
},
|
||||
"device_api_error": {
|
||||
"message": "Error communicating with the Device Access API, please see logs for details."
|
||||
},
|
||||
"reauth_required": {
|
||||
"message": "Reauthentication is required, please follow the instructions in the UI to reauthenticate your account."
|
||||
},
|
||||
"subscriber_error": {
|
||||
"message": "Subscriber failed to connect to Google, please see logs for details."
|
||||
},
|
||||
"subscriber_timeout": {
|
||||
"message": "Subscriber timed out while attempting to connect to Google. Please check your network connection and IPv6 configuration if applicable."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"subscription_name": {
|
||||
"options": {
|
||||
|
||||
@@ -432,7 +432,7 @@ class NumberDeviceClass(StrEnum):
|
||||
|
||||
Unit of measurement: UnitOfVolumeFlowRate
|
||||
- SI / metric: `m³/h`, `m³/min`, `m³/s`, `L/h`, `L/min`, `L/s`, `mL/s`
|
||||
- USCS / imperial: `ft³/min`, `gal/min`
|
||||
- USCS / imperial: `ft³/min`, `gal/min`, `gal/d`
|
||||
"""
|
||||
|
||||
WATER = "water"
|
||||
|
||||
@@ -237,7 +237,13 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
await gw_hub.gateway.set_gpio_mode(gpio_id, gpio_mode)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_SET_GPIO_MODE, set_gpio_mode, service_set_gpio_mode_schema
|
||||
DOMAIN,
|
||||
SERVICE_SET_GPIO_MODE,
|
||||
set_gpio_mode,
|
||||
service_set_gpio_mode_schema,
|
||||
description_placeholders={
|
||||
"gpio_modes_documentation_url": "https://www.home-assistant.io/integrations/opentherm_gw/#gpio-modes"
|
||||
},
|
||||
)
|
||||
|
||||
async def set_led_mode(call: ServiceCall) -> None:
|
||||
@@ -248,7 +254,13 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
await gw_hub.gateway.set_led_mode(led_id, led_mode)
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_SET_LED_MODE, set_led_mode, service_set_led_mode_schema
|
||||
DOMAIN,
|
||||
SERVICE_SET_LED_MODE,
|
||||
set_led_mode,
|
||||
service_set_led_mode_schema,
|
||||
description_placeholders={
|
||||
"led_modes_documentation_url": "https://www.home-assistant.io/integrations/opentherm_gw/#led-modes"
|
||||
},
|
||||
)
|
||||
|
||||
async def set_max_mod(call: ServiceCall) -> None:
|
||||
@@ -294,4 +306,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
|
||||
SERVICE_SEND_TRANSP_CMD,
|
||||
send_transparent_cmd,
|
||||
service_send_transp_cmd_schema,
|
||||
description_placeholders={
|
||||
"opentherm_gateway_firmware_url": "https://otgw.tclcode.com/firmware.html"
|
||||
},
|
||||
)
|
||||
|
||||
@@ -386,7 +386,7 @@
|
||||
"name": "Reset gateway"
|
||||
},
|
||||
"send_transparent_command": {
|
||||
"description": "Sends custom otgw commands (https://otgw.tclcode.com/firmware.html) through a transparent interface.",
|
||||
"description": "Sends custom OTGW commands ({opentherm_gateway_firmware_url}) through a transparent interface.",
|
||||
"fields": {
|
||||
"gateway_id": {
|
||||
"description": "[%key:component::opentherm_gw::services::reset_gateway::fields::gateway_id::description%]",
|
||||
@@ -461,7 +461,7 @@
|
||||
"name": "ID"
|
||||
},
|
||||
"mode": {
|
||||
"description": "Mode to set on the GPIO pin. Values 0 through 6 are accepted for both GPIOs, 7 is only accepted for GPIO \"B\". See https://www.home-assistant.io/integrations/opentherm_gw/#gpio-modes for an explanation of the values.",
|
||||
"description": "Mode to set on the GPIO pin. Values 0 through 6 are accepted for both GPIOs, 7 is only accepted for GPIO \"B\". See {gpio_modes_documentation_url} for an explanation of the values.",
|
||||
"name": "[%key:common::config_flow::data::mode%]"
|
||||
}
|
||||
},
|
||||
@@ -507,7 +507,7 @@
|
||||
"name": "ID"
|
||||
},
|
||||
"mode": {
|
||||
"description": "The function to assign to the LED. See https://www.home-assistant.io/integrations/opentherm_gw/#led-modes for an explanation of the values.",
|
||||
"description": "The function to assign to the LED. See {led_modes_documentation_url} for an explanation of the values.",
|
||||
"name": "[%key:common::config_flow::data::mode%]"
|
||||
}
|
||||
},
|
||||
|
||||
@@ -17,7 +17,7 @@ from .coordinator import PooldoseConfigEntry, PooldoseCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR]
|
||||
PLATFORMS: list[Platform] = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH]
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: PooldoseConfigEntry) -> bool:
|
||||
|
||||
@@ -120,6 +120,29 @@
|
||||
"ph_type_dosing": {
|
||||
"default": "mdi:beaker"
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"frequency_input": {
|
||||
"default": "mdi:sine-wave",
|
||||
"state": {
|
||||
"off": "mdi:pulse",
|
||||
"on": "mdi:sine-wave"
|
||||
}
|
||||
},
|
||||
"pause_dosing": {
|
||||
"default": "mdi:pause",
|
||||
"state": {
|
||||
"off": "mdi:play",
|
||||
"on": "mdi:pause"
|
||||
}
|
||||
},
|
||||
"pump_monitoring": {
|
||||
"default": "mdi:pump",
|
||||
"state": {
|
||||
"off": "mdi:pump-off",
|
||||
"on": "mdi:pump"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -11,5 +11,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/pooldose",
|
||||
"iot_class": "local_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["python-pooldose==0.7.8"]
|
||||
"requirements": ["python-pooldose==0.8.0"]
|
||||
}
|
||||
|
||||
@@ -161,6 +161,17 @@
|
||||
"alcalyne": "pH+"
|
||||
}
|
||||
}
|
||||
},
|
||||
"switch": {
|
||||
"frequency_input": {
|
||||
"name": "Frequency input"
|
||||
},
|
||||
"pause_dosing": {
|
||||
"name": "Pause dosing"
|
||||
},
|
||||
"pump_monitoring": {
|
||||
"name": "Pump monitoring"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
95
homeassistant/components/pooldose/switch.py
Normal file
95
homeassistant/components/pooldose/switch.py
Normal file
@@ -0,0 +1,95 @@
|
||||
"""Switches for the Seko PoolDose integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import TYPE_CHECKING, Any, cast
|
||||
|
||||
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from . import PooldoseConfigEntry
|
||||
from .entity import PooldoseEntity
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .coordinator import PooldoseCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
SWITCH_DESCRIPTIONS: tuple[SwitchEntityDescription, ...] = (
|
||||
SwitchEntityDescription(
|
||||
key="pause_dosing",
|
||||
translation_key="pause_dosing",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
SwitchEntityDescription(
|
||||
key="pump_monitoring",
|
||||
translation_key="pump_monitoring",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
SwitchEntityDescription(
|
||||
key="frequency_input",
|
||||
translation_key="frequency_input",
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: PooldoseConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up PoolDose switch entities from a config entry."""
|
||||
if TYPE_CHECKING:
|
||||
assert config_entry.unique_id is not None
|
||||
|
||||
coordinator = config_entry.runtime_data
|
||||
switch_data = coordinator.data["switch"]
|
||||
serial_number = config_entry.unique_id
|
||||
|
||||
async_add_entities(
|
||||
PooldoseSwitch(coordinator, serial_number, coordinator.device_info, description)
|
||||
for description in SWITCH_DESCRIPTIONS
|
||||
if description.key in switch_data
|
||||
)
|
||||
|
||||
|
||||
class PooldoseSwitch(PooldoseEntity, SwitchEntity):
|
||||
"""Switch entity for the Seko PoolDose Python API."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: PooldoseCoordinator,
|
||||
serial_number: str,
|
||||
device_info: Any,
|
||||
description: SwitchEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the switch."""
|
||||
super().__init__(coordinator, serial_number, device_info, description, "switch")
|
||||
self._async_update_attrs()
|
||||
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
"""Handle updated data from the coordinator."""
|
||||
self._async_update_attrs()
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
def _async_update_attrs(self) -> None:
|
||||
"""Update switch attributes."""
|
||||
data = cast(dict, self.get_data())
|
||||
self._attr_is_on = cast(bool, data["value"])
|
||||
|
||||
async def async_turn_on(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch on."""
|
||||
await self.coordinator.client.set_switch(self.entity_description.key, True)
|
||||
self._attr_is_on = True
|
||||
self.async_write_ha_state()
|
||||
|
||||
async def async_turn_off(self, **kwargs: Any) -> None:
|
||||
"""Turn the switch off."""
|
||||
await self.coordinator.client.set_switch(self.entity_description.key, False)
|
||||
self._attr_is_on = False
|
||||
self.async_write_ha_state()
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["renault_api"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["renault-api==0.5.0"]
|
||||
"requirements": ["renault-api==0.5.1"]
|
||||
}
|
||||
|
||||
@@ -19,5 +19,5 @@
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["reolink_aio"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["reolink-aio==0.16.5"]
|
||||
"requirements": ["reolink-aio==0.16.6"]
|
||||
}
|
||||
|
||||
@@ -9,16 +9,14 @@ import logging
|
||||
from typing import Any
|
||||
|
||||
from roborock import (
|
||||
HomeDataRoom,
|
||||
RoborockException,
|
||||
RoborockInvalidCredentials,
|
||||
RoborockInvalidUserAgreement,
|
||||
RoborockNoUserAgreement,
|
||||
)
|
||||
from roborock.data import DeviceData, HomeDataDevice, HomeDataProduct, UserData
|
||||
from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1
|
||||
from roborock.version_a01_apis import RoborockMqttClientA01
|
||||
from roborock.web_api import RoborockApiClient
|
||||
from roborock.data import UserData
|
||||
from roborock.devices.device import RoborockDevice
|
||||
from roborock.devices.device_manager import UserParams, create_device_manager
|
||||
|
||||
from homeassistant.const import CONF_USERNAME, EVENT_HOMEASSISTANT_STOP
|
||||
from homeassistant.core import HomeAssistant
|
||||
@@ -32,8 +30,10 @@ from .coordinator import (
|
||||
RoborockCoordinators,
|
||||
RoborockDataUpdateCoordinator,
|
||||
RoborockDataUpdateCoordinatorA01,
|
||||
RoborockWashingMachineUpdateCoordinator,
|
||||
RoborockWetDryVacUpdateCoordinator,
|
||||
)
|
||||
from .roborock_storage import async_remove_map_storage
|
||||
from .roborock_storage import CacheStore, async_cleanup_map_storage
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
@@ -42,16 +42,21 @@ _LOGGER = logging.getLogger(__name__)
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> bool:
|
||||
"""Set up roborock from a config entry."""
|
||||
await async_cleanup_map_storage(hass, entry.entry_id)
|
||||
|
||||
user_data = UserData.from_dict(entry.data[CONF_USER_DATA])
|
||||
api_client = RoborockApiClient(
|
||||
entry.data[CONF_USERNAME],
|
||||
entry.data[CONF_BASE_URL],
|
||||
session=async_get_clientsession(hass),
|
||||
user_params = UserParams(
|
||||
username=entry.data[CONF_USERNAME],
|
||||
user_data=user_data,
|
||||
base_url=entry.data[CONF_BASE_URL],
|
||||
)
|
||||
_LOGGER.debug("Getting home data")
|
||||
cache = CacheStore(hass, entry.entry_id)
|
||||
try:
|
||||
home_data = await api_client.get_home_data_v3(user_data)
|
||||
device_manager = await create_device_manager(
|
||||
user_params,
|
||||
cache=cache,
|
||||
session=async_get_clientsession(hass),
|
||||
)
|
||||
except RoborockInvalidCredentials as err:
|
||||
raise ConfigEntryAuthFailed(
|
||||
"Invalid credentials",
|
||||
@@ -75,29 +80,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="home_data_fail",
|
||||
) from err
|
||||
devices = await device_manager.get_devices()
|
||||
_LOGGER.debug("Device manager found %d devices", len(devices))
|
||||
for device in devices:
|
||||
entry.async_on_unload(device.close)
|
||||
|
||||
_LOGGER.debug("Got home data %s", home_data)
|
||||
all_devices: list[HomeDataDevice] = home_data.devices + home_data.received_devices
|
||||
device_map: dict[str, HomeDataDevice] = {
|
||||
device.duid: device for device in all_devices
|
||||
}
|
||||
product_info: dict[str, HomeDataProduct] = {
|
||||
product.id: product for product in home_data.products
|
||||
}
|
||||
# Get a Coordinator if the device is available or if we have connected to the device before
|
||||
coordinators = await asyncio.gather(
|
||||
*build_setup_functions(
|
||||
hass,
|
||||
entry,
|
||||
device_map,
|
||||
user_data,
|
||||
product_info,
|
||||
home_data.rooms,
|
||||
api_client,
|
||||
),
|
||||
*build_setup_functions(hass, entry, devices, user_data),
|
||||
return_exceptions=True,
|
||||
)
|
||||
# Valid coordinators are those where we had networking cached or we could get networking
|
||||
v1_coords = [
|
||||
coord
|
||||
for coord in coordinators
|
||||
@@ -115,9 +106,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
|
||||
translation_key="no_coordinators",
|
||||
)
|
||||
valid_coordinators = RoborockCoordinators(v1_coords, a01_coords)
|
||||
await asyncio.gather(
|
||||
*(coord.refresh_coordinator_map() for coord in valid_coordinators.v1)
|
||||
)
|
||||
|
||||
async def on_stop(_: Any) -> None:
|
||||
_LOGGER.debug("Shutting down roborock")
|
||||
@@ -125,7 +113,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
|
||||
*(
|
||||
coordinator.async_shutdown()
|
||||
for coordinator in valid_coordinators.values()
|
||||
)
|
||||
),
|
||||
cache.flush(),
|
||||
)
|
||||
|
||||
entry.async_on_unload(
|
||||
@@ -138,6 +127,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
|
||||
_remove_stale_devices(hass, entry, devices)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def _remove_stale_devices(
|
||||
hass: HomeAssistant,
|
||||
entry: RoborockConfigEntry,
|
||||
devices: list[RoborockDevice],
|
||||
) -> None:
|
||||
device_map: dict[str, RoborockDevice] = {device.duid: device for device in devices}
|
||||
device_registry = dr.async_get(hass)
|
||||
device_entries = dr.async_entries_for_config_entry(
|
||||
device_registry, config_entry_id=entry.entry_id
|
||||
@@ -159,8 +159,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
|
||||
remove_config_entry_id=entry.entry_id,
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_migrate_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> bool:
|
||||
"""Migrate old configuration entries to the new format."""
|
||||
@@ -190,11 +188,8 @@ async def async_migrate_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -
|
||||
def build_setup_functions(
|
||||
hass: HomeAssistant,
|
||||
entry: RoborockConfigEntry,
|
||||
device_map: dict[str, HomeDataDevice],
|
||||
devices: list[RoborockDevice],
|
||||
user_data: UserData,
|
||||
product_info: dict[str, HomeDataProduct],
|
||||
home_data_rooms: list[HomeDataRoom],
|
||||
api_client: RoborockApiClient,
|
||||
) -> list[
|
||||
Coroutine[
|
||||
Any,
|
||||
@@ -203,134 +198,45 @@ def build_setup_functions(
|
||||
]
|
||||
]:
|
||||
"""Create a list of setup functions that can later be called asynchronously."""
|
||||
return [
|
||||
setup_device(
|
||||
hass,
|
||||
entry,
|
||||
user_data,
|
||||
device,
|
||||
product_info[device.product_id],
|
||||
home_data_rooms,
|
||||
api_client,
|
||||
)
|
||||
for device in device_map.values()
|
||||
]
|
||||
coordinators: list[
|
||||
RoborockDataUpdateCoordinator | RoborockDataUpdateCoordinatorA01
|
||||
] = []
|
||||
for device in devices:
|
||||
_LOGGER.debug("Creating device %s: %s", device.name, device)
|
||||
if device.v1_properties is not None:
|
||||
coordinators.append(
|
||||
RoborockDataUpdateCoordinator(hass, entry, device, device.v1_properties)
|
||||
)
|
||||
elif device.dyad is not None:
|
||||
coordinators.append(
|
||||
RoborockWetDryVacUpdateCoordinator(hass, entry, device, device.dyad)
|
||||
)
|
||||
elif device.zeo is not None:
|
||||
coordinators.append(
|
||||
RoborockWashingMachineUpdateCoordinator(hass, entry, device, device.zeo)
|
||||
)
|
||||
else:
|
||||
_LOGGER.warning(
|
||||
"Not adding device %s because its protocol version %s or category %s is not supported",
|
||||
device.duid,
|
||||
device.device_info.pv,
|
||||
device.product.category.name,
|
||||
)
|
||||
|
||||
return [setup_coordinator(coordinator) for coordinator in coordinators]
|
||||
|
||||
|
||||
async def setup_device(
|
||||
hass: HomeAssistant,
|
||||
entry: RoborockConfigEntry,
|
||||
user_data: UserData,
|
||||
device: HomeDataDevice,
|
||||
product_info: HomeDataProduct,
|
||||
home_data_rooms: list[HomeDataRoom],
|
||||
api_client: RoborockApiClient,
|
||||
async def setup_coordinator(
|
||||
coordinator: RoborockDataUpdateCoordinator | RoborockDataUpdateCoordinatorA01,
|
||||
) -> RoborockDataUpdateCoordinator | RoborockDataUpdateCoordinatorA01 | None:
|
||||
"""Set up a coordinator for a given device."""
|
||||
if device.pv == "1.0":
|
||||
return await setup_device_v1(
|
||||
hass, entry, user_data, device, product_info, home_data_rooms, api_client
|
||||
)
|
||||
if device.pv == "A01":
|
||||
return await setup_device_a01(hass, entry, user_data, device, product_info)
|
||||
_LOGGER.warning(
|
||||
"Not adding device %s because its protocol version %s or category %s is not supported",
|
||||
device.duid,
|
||||
device.pv,
|
||||
product_info.category.name,
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
async def setup_device_v1(
|
||||
hass: HomeAssistant,
|
||||
entry: RoborockConfigEntry,
|
||||
user_data: UserData,
|
||||
device: HomeDataDevice,
|
||||
product_info: HomeDataProduct,
|
||||
home_data_rooms: list[HomeDataRoom],
|
||||
api_client: RoborockApiClient,
|
||||
) -> RoborockDataUpdateCoordinator | None:
|
||||
"""Set up a device Coordinator."""
|
||||
mqtt_client = await hass.async_add_executor_job(
|
||||
RoborockMqttClientV1, user_data, DeviceData(device, product_info.model)
|
||||
)
|
||||
try:
|
||||
await mqtt_client.async_connect()
|
||||
networking = await mqtt_client.get_networking()
|
||||
if networking is None:
|
||||
# If the api does not return an error but does return None for
|
||||
# get_networking - then we need to go through cache checking.
|
||||
raise RoborockException("Networking request returned None.") # noqa: TRY301
|
||||
except RoborockException as err:
|
||||
_LOGGER.warning(
|
||||
"Not setting up %s because we could not get the network information of the device. "
|
||||
"Please confirm it is online and the Roborock servers can communicate with it",
|
||||
device.name,
|
||||
)
|
||||
_LOGGER.debug(err)
|
||||
await mqtt_client.async_release()
|
||||
raise
|
||||
coordinator = RoborockDataUpdateCoordinator(
|
||||
hass,
|
||||
entry,
|
||||
device,
|
||||
networking,
|
||||
product_info,
|
||||
mqtt_client,
|
||||
home_data_rooms,
|
||||
api_client,
|
||||
user_data,
|
||||
)
|
||||
"""Set up a single coordinator."""
|
||||
try:
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
except ConfigEntryNotReady as ex:
|
||||
except ConfigEntryNotReady:
|
||||
await coordinator.async_shutdown()
|
||||
if isinstance(coordinator.api, RoborockMqttClientV1):
|
||||
_LOGGER.warning(
|
||||
"Not setting up %s because the we failed to get data for the first time using the online client. "
|
||||
"Please ensure your Home Assistant instance can communicate with this device. "
|
||||
"You may need to open firewall instances on your Home Assistant network and on your Vacuum's network",
|
||||
device.name,
|
||||
)
|
||||
# Most of the time if we fail to connect using the mqtt client, the problem is due to firewall,
|
||||
# but in case if it isn't, the error can be included in debug logs for the user to grab.
|
||||
if coordinator.last_exception:
|
||||
_LOGGER.debug(coordinator.last_exception)
|
||||
raise coordinator.last_exception from ex
|
||||
elif coordinator.last_exception:
|
||||
# If this is reached, we have verified that we can communicate with the Vacuum locally,
|
||||
# so if there is an error here - it is not a communication issue but some other problem
|
||||
extra_error = f"Please create an issue with the following error included: {coordinator.last_exception}"
|
||||
_LOGGER.warning(
|
||||
"Not setting up %s because the coordinator failed to get data for the first time using the "
|
||||
"offline client %s",
|
||||
device.name,
|
||||
extra_error,
|
||||
)
|
||||
raise coordinator.last_exception from ex
|
||||
return coordinator
|
||||
|
||||
|
||||
async def setup_device_a01(
|
||||
hass: HomeAssistant,
|
||||
entry: RoborockConfigEntry,
|
||||
user_data: UserData,
|
||||
device: HomeDataDevice,
|
||||
product_info: HomeDataProduct,
|
||||
) -> RoborockDataUpdateCoordinatorA01 | None:
|
||||
"""Set up a A01 protocol device."""
|
||||
mqtt_client = await hass.async_add_executor_job(
|
||||
RoborockMqttClientA01,
|
||||
user_data,
|
||||
DeviceData(device, product_info.model),
|
||||
product_info.category,
|
||||
)
|
||||
coord = RoborockDataUpdateCoordinatorA01(
|
||||
hass, entry, device, product_info, mqtt_client
|
||||
)
|
||||
await coord.async_config_entry_first_refresh()
|
||||
return coord
|
||||
raise
|
||||
else:
|
||||
return coordinator
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> bool:
|
||||
@@ -340,4 +246,5 @@ async def async_unload_entry(hass: HomeAssistant, entry: RoborockConfigEntry) ->
|
||||
|
||||
async def async_remove_entry(hass: HomeAssistant, entry: RoborockConfigEntry) -> None:
|
||||
"""Handle removal of an entry."""
|
||||
await async_remove_map_storage(hass, entry.entry_id)
|
||||
store = CacheStore(hass, entry.entry_id)
|
||||
await store.async_remove()
|
||||
|
||||
@@ -6,7 +6,6 @@ from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
|
||||
from roborock.data import RoborockStateCode
|
||||
from roborock.roborock_typing import DeviceProp
|
||||
|
||||
from homeassistant.components.binary_sensor import (
|
||||
BinarySensorDeviceClass,
|
||||
@@ -19,6 +18,7 @@ from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator
|
||||
from .entity import RoborockCoordinatedEntityV1
|
||||
from .models import DeviceState
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
@@ -27,9 +27,11 @@ PARALLEL_UPDATES = 0
|
||||
class RoborockBinarySensorDescription(BinarySensorEntityDescription):
|
||||
"""A class that describes Roborock binary sensors."""
|
||||
|
||||
value_fn: Callable[[DeviceProp], bool | int | None]
|
||||
# If it is a dock entity
|
||||
value_fn: Callable[[DeviceState], bool | int | None]
|
||||
"""A function that extracts the sensor value from DeviceState."""
|
||||
|
||||
is_dock_entity: bool = False
|
||||
"""Whether this sensor is for the dock."""
|
||||
|
||||
|
||||
BINARY_SENSOR_DESCRIPTIONS = [
|
||||
@@ -92,7 +94,7 @@ async def async_setup_entry(
|
||||
)
|
||||
for coordinator in config_entry.runtime_data.v1
|
||||
for description in BINARY_SENSOR_DESCRIPTIONS
|
||||
if description.value_fn(coordinator.roborock_device_info.props) is not None
|
||||
if description.value_fn(coordinator.data) is not None
|
||||
)
|
||||
|
||||
|
||||
@@ -117,8 +119,4 @@ class RoborockBinarySensorEntity(RoborockCoordinatedEntityV1, BinarySensorEntity
|
||||
@property
|
||||
def is_on(self) -> bool:
|
||||
"""Return the value reported by the sensor."""
|
||||
return bool(
|
||||
self.entity_description.value_fn(
|
||||
self.coordinator.roborock_device_info.props
|
||||
)
|
||||
)
|
||||
return bool(self.entity_description.value_fn(self.coordinator.data))
|
||||
|
||||
@@ -5,18 +5,24 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
import itertools
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from roborock.roborock_typing import RoborockCommand
|
||||
from roborock.devices.traits.v1.consumeable import ConsumableAttribute
|
||||
from roborock.exceptions import RoborockException
|
||||
|
||||
from homeassistant.components.button import ButtonEntity, ButtonEntityDescription
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator
|
||||
from .entity import RoborockEntity, RoborockEntityV1
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
@@ -24,40 +30,35 @@ PARALLEL_UPDATES = 0
|
||||
class RoborockButtonDescription(ButtonEntityDescription):
|
||||
"""Describes a Roborock button entity."""
|
||||
|
||||
command: RoborockCommand
|
||||
param: list | dict | None
|
||||
attribute: ConsumableAttribute
|
||||
|
||||
|
||||
CONSUMABLE_BUTTON_DESCRIPTIONS = [
|
||||
RoborockButtonDescription(
|
||||
key="reset_sensor_consumable",
|
||||
translation_key="reset_sensor_consumable",
|
||||
command=RoborockCommand.RESET_CONSUMABLE,
|
||||
param=["sensor_dirty_time"],
|
||||
attribute=ConsumableAttribute.SENSOR_DIRTY_TIME,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
RoborockButtonDescription(
|
||||
key="reset_air_filter_consumable",
|
||||
translation_key="reset_air_filter_consumable",
|
||||
command=RoborockCommand.RESET_CONSUMABLE,
|
||||
param=["filter_work_time"],
|
||||
attribute=ConsumableAttribute.FILTER_WORK_TIME,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
RoborockButtonDescription(
|
||||
key="reset_side_brush_consumable",
|
||||
translation_key="reset_side_brush_consumable",
|
||||
command=RoborockCommand.RESET_CONSUMABLE,
|
||||
param=["side_brush_work_time"],
|
||||
attribute=ConsumableAttribute.SIDE_BRUSH_WORK_TIME,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
RoborockButtonDescription(
|
||||
key="reset_main_brush_consumable",
|
||||
translation_key="reset_main_brush_consumable",
|
||||
command=RoborockCommand.RESET_CONSUMABLE,
|
||||
param=["main_brush_work_time"],
|
||||
attribute=ConsumableAttribute.MAIN_BRUSH_WORK_TIME,
|
||||
entity_category=EntityCategory.CONFIG,
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
@@ -115,13 +116,26 @@ class RoborockButtonEntity(RoborockEntityV1, ButtonEntity):
|
||||
super().__init__(
|
||||
f"{entity_description.key}_{coordinator.duid_slug}",
|
||||
coordinator.device_info,
|
||||
coordinator.api,
|
||||
api=coordinator.properties_api.command,
|
||||
)
|
||||
self.entity_description = entity_description
|
||||
self._consumable = coordinator.properties_api.consumables
|
||||
|
||||
async def async_press(self) -> None:
|
||||
"""Press the button."""
|
||||
await self.send(self.entity_description.command, self.entity_description.param)
|
||||
try:
|
||||
await self._consumable.reset_consumable(self.entity_description.attribute)
|
||||
except RoborockException as err:
|
||||
# This error message could be improved since it is fairly low level
|
||||
# and technical. Can add a more user friendly message with the
|
||||
# name of the attribute being reset.
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="command_failed",
|
||||
translation_placeholders={
|
||||
"command": "RESET_CONSUMABLE",
|
||||
},
|
||||
) from err
|
||||
|
||||
|
||||
class RoborockRoutineButtonEntity(RoborockEntity, ButtonEntity):
|
||||
@@ -138,7 +152,6 @@ class RoborockRoutineButtonEntity(RoborockEntity, ButtonEntity):
|
||||
super().__init__(
|
||||
f"{entity_description.key}_{coordinator.duid_slug}",
|
||||
coordinator.device_info,
|
||||
coordinator.api,
|
||||
)
|
||||
self._routine_id = int(entity_description.key)
|
||||
self._coordinator = coordinator
|
||||
|
||||
@@ -2,35 +2,18 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import io
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import Any, TypeVar
|
||||
|
||||
from propcache.api import cached_property
|
||||
from roborock import HomeDataRoom
|
||||
from roborock.data import (
|
||||
DeviceData,
|
||||
HomeDataDevice,
|
||||
HomeDataProduct,
|
||||
HomeDataScene,
|
||||
NetworkInfo,
|
||||
RoborockCategory,
|
||||
UserData,
|
||||
)
|
||||
from roborock.exceptions import RoborockException
|
||||
from roborock.data import HomeDataScene
|
||||
from roborock.devices.device import RoborockDevice
|
||||
from roborock.devices.traits.a01 import DyadApi, ZeoApi
|
||||
from roborock.devices.traits.v1 import PropertiesApi
|
||||
from roborock.exceptions import RoborockDeviceBusy, RoborockException
|
||||
from roborock.roborock_message import RoborockDyadDataProtocol, RoborockZeoProtocol
|
||||
from roborock.roborock_typing import DeviceProp
|
||||
from roborock.version_1_apis.roborock_local_client_v1 import RoborockLocalClientV1
|
||||
from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1
|
||||
from roborock.version_a01_apis import RoborockClientA01
|
||||
from roborock.web_api import RoborockApiClient
|
||||
from vacuum_map_parser_base.config.color import ColorsPalette, SupportedColor
|
||||
from vacuum_map_parser_base.config.image_config import ImageConfig
|
||||
from vacuum_map_parser_base.config.size import Size, Sizes
|
||||
from vacuum_map_parser_base.map_data import MapData
|
||||
from vacuum_map_parser_roborock.map_data_parser import RoborockMapDataParser
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import ATTR_CONNECTIONS
|
||||
@@ -49,21 +32,14 @@ from homeassistant.util import dt as dt_util, slugify
|
||||
|
||||
from .const import (
|
||||
A01_UPDATE_INTERVAL,
|
||||
CONF_SHOW_BACKGROUND,
|
||||
DEFAULT_DRAWABLES,
|
||||
DOMAIN,
|
||||
DRAWABLES,
|
||||
IMAGE_CACHE_INTERVAL,
|
||||
MAP_FILE_FORMAT,
|
||||
MAP_SCALE,
|
||||
MAP_SLEEP,
|
||||
V1_CLOUD_IN_CLEANING_INTERVAL,
|
||||
V1_CLOUD_NOT_CLEANING_INTERVAL,
|
||||
V1_LOCAL_IN_CLEANING_INTERVAL,
|
||||
V1_LOCAL_NOT_CLEANING_INTERVAL,
|
||||
)
|
||||
from .models import RoborockA01HassDeviceInfo, RoborockHassDeviceInfo, RoborockMapInfo
|
||||
from .roborock_storage import RoborockMapStorage
|
||||
from .models import DeviceState
|
||||
|
||||
SCAN_INTERVAL = timedelta(seconds=30)
|
||||
|
||||
@@ -87,7 +63,7 @@ class RoborockCoordinators:
|
||||
type RoborockConfigEntry = ConfigEntry[RoborockCoordinators]
|
||||
|
||||
|
||||
class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceState]):
|
||||
"""Class to manage fetching data from the API."""
|
||||
|
||||
config_entry: RoborockConfigEntry
|
||||
@@ -96,13 +72,8 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: RoborockConfigEntry,
|
||||
device: HomeDataDevice,
|
||||
device_networking: NetworkInfo,
|
||||
product_info: HomeDataProduct,
|
||||
cloud_api: RoborockMqttClientV1,
|
||||
home_data_rooms: list[HomeDataRoom],
|
||||
api_client: RoborockApiClient,
|
||||
user_data: UserData,
|
||||
device: RoborockDevice,
|
||||
properties_api: PropertiesApi,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(
|
||||
@@ -113,62 +84,24 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
# Assume we can use the local api.
|
||||
update_interval=V1_LOCAL_NOT_CLEANING_INTERVAL,
|
||||
)
|
||||
self.roborock_device_info = RoborockHassDeviceInfo(
|
||||
device,
|
||||
device_networking,
|
||||
product_info,
|
||||
DeviceProp(),
|
||||
)
|
||||
device_data = DeviceData(device, product_info.model, device_networking.ip)
|
||||
self.api: RoborockLocalClientV1 | RoborockMqttClientV1 = RoborockLocalClientV1(
|
||||
device_data, queue_timeout=5
|
||||
)
|
||||
self.cloud_api = cloud_api
|
||||
self._device = device
|
||||
self.properties_api = properties_api
|
||||
self.device_info = DeviceInfo(
|
||||
name=self.roborock_device_info.device.name,
|
||||
name=self._device.device_info.name,
|
||||
identifiers={(DOMAIN, self.duid)},
|
||||
manufacturer="Roborock",
|
||||
model=self.roborock_device_info.product.model,
|
||||
model_id=self.roborock_device_info.product.model,
|
||||
sw_version=self.roborock_device_info.device.fv,
|
||||
model=self._device.product.model,
|
||||
model_id=self._device.product.model,
|
||||
sw_version=self._device.device_info.fv,
|
||||
)
|
||||
self.current_map: int | None = None
|
||||
|
||||
if mac := self.roborock_device_info.network_info.mac:
|
||||
if mac := properties_api.network_info.mac:
|
||||
self.device_info[ATTR_CONNECTIONS] = {
|
||||
(dr.CONNECTION_NETWORK_MAC, dr.format_mac(mac))
|
||||
}
|
||||
# Maps from map flag to map name
|
||||
self.maps: dict[int, RoborockMapInfo] = {}
|
||||
self._home_data_rooms = {str(room.id): room.name for room in home_data_rooms}
|
||||
self.map_storage = RoborockMapStorage(
|
||||
hass, self.config_entry.entry_id, self.duid_slug
|
||||
)
|
||||
self._user_data = user_data
|
||||
self._api_client = api_client
|
||||
self._is_cloud_api = False
|
||||
drawables = [
|
||||
drawable
|
||||
for drawable, default_value in DEFAULT_DRAWABLES.items()
|
||||
if config_entry.options.get(DRAWABLES, {}).get(drawable, default_value)
|
||||
]
|
||||
colors = ColorsPalette()
|
||||
if not config_entry.options.get(CONF_SHOW_BACKGROUND, False):
|
||||
colors = ColorsPalette({SupportedColor.MAP_OUTSIDE: (0, 0, 0, 0)})
|
||||
self.map_parser = RoborockMapDataParser(
|
||||
colors,
|
||||
Sizes(
|
||||
{
|
||||
k: v * MAP_SCALE
|
||||
for k, v in Sizes.SIZES.items()
|
||||
if k != Size.MOP_PATH_WIDTH
|
||||
}
|
||||
),
|
||||
drawables,
|
||||
ImageConfig(scale=MAP_SCALE),
|
||||
[],
|
||||
)
|
||||
self.last_update_state: str | None = None
|
||||
# Keep track of last attempt to refresh maps/rooms to know when to try again.
|
||||
self._last_home_update_attempt: datetime
|
||||
self.last_home_update: datetime | None = None
|
||||
|
||||
@cached_property
|
||||
def dock_device_info(self) -> DeviceInfo:
|
||||
@@ -177,39 +110,40 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
This must happen after the coordinator does the first update.
|
||||
Which will be the case when this is called.
|
||||
"""
|
||||
dock_type = self.roborock_device_info.props.status.dock_type
|
||||
dock_type = self.properties_api.status.dock_type
|
||||
return DeviceInfo(
|
||||
name=f"{self.roborock_device_info.device.name} Dock",
|
||||
name=f"{self._device.device_info.name} Dock",
|
||||
identifiers={(DOMAIN, f"{self.duid}_dock")},
|
||||
manufacturer="Roborock",
|
||||
model=f"{self.roborock_device_info.product.model} Dock",
|
||||
model=f"{self._device.product.model} Dock",
|
||||
model_id=str(dock_type.value) if dock_type is not None else "Unknown",
|
||||
sw_version=self.roborock_device_info.device.fv,
|
||||
sw_version=self._device.device_info.fv,
|
||||
)
|
||||
|
||||
def parse_map_data_v1(
|
||||
self, map_bytes: bytes
|
||||
) -> tuple[bytes | None, MapData | None]:
|
||||
"""Parse map_bytes and return MapData and the image."""
|
||||
try:
|
||||
parsed_map = self.map_parser.parse(map_bytes)
|
||||
except (IndexError, ValueError) as err:
|
||||
_LOGGER.debug("Exception when parsing map contents: %s", err)
|
||||
return None, None
|
||||
if parsed_map.image is None:
|
||||
return None, None
|
||||
img_byte_arr = io.BytesIO()
|
||||
parsed_map.image.data.save(img_byte_arr, format=MAP_FILE_FORMAT)
|
||||
return img_byte_arr.getvalue(), parsed_map
|
||||
|
||||
async def _async_setup(self) -> None:
|
||||
"""Set up the coordinator."""
|
||||
# Verify we can communicate locally - if we can't, switch to cloud api
|
||||
await self._verify_api()
|
||||
self.api.is_available = True
|
||||
|
||||
try:
|
||||
maps = await self.api.get_multi_maps_list()
|
||||
await self.properties_api.status.refresh()
|
||||
except RoborockException as err:
|
||||
_LOGGER.debug("Failed to update data during setup: %s", err)
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_data_fail",
|
||||
) from err
|
||||
|
||||
self._last_home_update_attempt = dt_util.utcnow()
|
||||
|
||||
# This populates a cache of maps/rooms so we have the information
|
||||
# even for maps that are inactive but is a no-op if we already have
|
||||
# the information. This will cycle through all the available maps and
|
||||
# requires the device to be idle. If the device is busy cleaning, then
|
||||
# we'll retry later in `update_map` and in the mean time we won't have
|
||||
# all map/room information.
|
||||
try:
|
||||
await self.properties_api.home.discover_home()
|
||||
except RoborockDeviceBusy:
|
||||
_LOGGER.info("Home discovery skipped while device is busy/cleaning")
|
||||
except RoborockException as err:
|
||||
_LOGGER.debug("Failed to get maps: %s", err)
|
||||
raise UpdateFailed(
|
||||
@@ -217,81 +151,32 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
translation_key="map_failure",
|
||||
translation_placeholders={"error": str(err)},
|
||||
) from err
|
||||
# Rooms names populated later with calls to `set_current_map_rooms` for each map
|
||||
roborock_maps = maps.map_info if (maps and maps.map_info) else ()
|
||||
stored_images = await asyncio.gather(
|
||||
*[
|
||||
self.map_storage.async_load_map(roborock_map.mapFlag)
|
||||
for roborock_map in roborock_maps
|
||||
]
|
||||
)
|
||||
self.maps = {
|
||||
roborock_map.mapFlag: RoborockMapInfo(
|
||||
flag=roborock_map.mapFlag,
|
||||
name=roborock_map.name or f"Map {roborock_map.mapFlag}",
|
||||
rooms={},
|
||||
image=image,
|
||||
last_updated=dt_util.utcnow() - IMAGE_CACHE_INTERVAL,
|
||||
map_data=None,
|
||||
)
|
||||
for image, roborock_map in zip(stored_images, roborock_maps, strict=False)
|
||||
}
|
||||
else:
|
||||
# Force a map refresh on first setup
|
||||
self.last_home_update = dt_util.utcnow() - IMAGE_CACHE_INTERVAL
|
||||
|
||||
async def update_map(self) -> None:
|
||||
"""Update the currently selected map."""
|
||||
# The current map was set in the props update, so these can be done without
|
||||
# worry of applying them to the wrong map.
|
||||
if self.current_map is None or self.current_map not in self.maps:
|
||||
# This exists as a safeguard/ to keep mypy happy.
|
||||
return
|
||||
try:
|
||||
response = await self.cloud_api.get_map_v1()
|
||||
await self.properties_api.home.discover_home()
|
||||
await self.properties_api.home.refresh()
|
||||
except RoborockException as ex:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="map_failure",
|
||||
) from ex
|
||||
if not isinstance(response, bytes):
|
||||
_LOGGER.debug("Failed to parse map contents: %s", response)
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="map_failure",
|
||||
)
|
||||
parsed_image, parsed_map = self.parse_map_data_v1(response)
|
||||
if parsed_image is None or parsed_map is None:
|
||||
raise HomeAssistantError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="map_failure",
|
||||
)
|
||||
current_roborock_map_info = self.maps[self.current_map]
|
||||
if parsed_image != self.maps[self.current_map].image:
|
||||
await self.map_storage.async_save_map(
|
||||
self.current_map,
|
||||
parsed_image,
|
||||
)
|
||||
current_roborock_map_info.image = parsed_image
|
||||
current_roborock_map_info.last_updated = dt_util.utcnow()
|
||||
current_roborock_map_info.map_data = parsed_map
|
||||
else:
|
||||
self.last_home_update = dt_util.utcnow()
|
||||
|
||||
async def _verify_api(self) -> None:
|
||||
"""Verify that the api is reachable. If it is not, switch clients."""
|
||||
if isinstance(self.api, RoborockLocalClientV1):
|
||||
try:
|
||||
await self.api.async_connect()
|
||||
await self.api.ping()
|
||||
if self._device.is_connected:
|
||||
if self._device.is_local_connected:
|
||||
async_delete_issue(
|
||||
self.hass, DOMAIN, f"cloud_api_used_{self.duid_slug}"
|
||||
)
|
||||
except RoborockException:
|
||||
_LOGGER.warning(
|
||||
"Using the cloud API for device %s. This is not recommended as it can lead to rate limiting. We recommend making your vacuum accessible by your Home Assistant instance",
|
||||
self.duid,
|
||||
)
|
||||
await self.api.async_disconnect()
|
||||
# We use the cloud api if the local api fails to connect.
|
||||
self.api = self.cloud_api
|
||||
else:
|
||||
self.update_interval = V1_CLOUD_NOT_CLEANING_INTERVAL
|
||||
self._is_cloud_api = True
|
||||
async_create_issue(
|
||||
self.hass,
|
||||
DOMAIN,
|
||||
@@ -299,100 +184,81 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
is_fixable=False,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="cloud_api_used",
|
||||
translation_placeholders={
|
||||
"device_name": self.roborock_device_info.device.name
|
||||
},
|
||||
translation_placeholders={"device_name": self._device.name},
|
||||
learn_more_url="https://www.home-assistant.io/integrations/roborock/#the-integration-tells-me-it-cannot-reach-my-vacuum-and-is-using-the-cloud-api-and-that-this-is-not-supported-or-i-am-having-any-networking-issues",
|
||||
)
|
||||
|
||||
# Right now this should never be called if the cloud api is the primary api,
|
||||
# but in the future if it is, a new else should be added.
|
||||
|
||||
async def async_shutdown(self) -> None:
|
||||
"""Shutdown the coordinator."""
|
||||
await super().async_shutdown()
|
||||
await asyncio.gather(
|
||||
self.map_storage.flush(),
|
||||
self.api.async_release(),
|
||||
self.cloud_api.async_release(),
|
||||
)
|
||||
|
||||
async def _update_device_prop(self) -> None:
|
||||
"""Update device properties."""
|
||||
if (device_prop := await self.api.get_prop()) is not None:
|
||||
self.roborock_device_info.props.update(device_prop)
|
||||
await _refresh_traits(
|
||||
[
|
||||
trait
|
||||
for trait in (
|
||||
self.properties_api.status,
|
||||
self.properties_api.consumables,
|
||||
self.properties_api.clean_summary,
|
||||
self.properties_api.dnd,
|
||||
self.properties_api.dust_collection_mode,
|
||||
self.properties_api.wash_towel_mode,
|
||||
self.properties_api.smart_wash_params,
|
||||
self.properties_api.sound_volume,
|
||||
self.properties_api.child_lock,
|
||||
self.properties_api.dust_collection_mode,
|
||||
self.properties_api.flow_led_status,
|
||||
self.properties_api.valley_electricity_timer,
|
||||
)
|
||||
if trait is not None
|
||||
]
|
||||
)
|
||||
_LOGGER.debug("Updated device properties")
|
||||
|
||||
async def _async_update_data(self) -> DeviceProp:
|
||||
async def _async_update_data(self) -> DeviceState:
|
||||
"""Update data via library."""
|
||||
try:
|
||||
# Update device props and standard api information
|
||||
await self._update_device_prop()
|
||||
# Set the new map id from the updated device props
|
||||
self._set_current_map()
|
||||
# Get the rooms for that map id.
|
||||
|
||||
# If the vacuum is currently cleaning and it has been IMAGE_CACHE_INTERVAL
|
||||
# since the last map update, you can update the map.
|
||||
new_status = self.roborock_device_info.props.status
|
||||
new_status = self.properties_api.status
|
||||
if (
|
||||
self.current_map is not None
|
||||
and (current_map := self.maps.get(self.current_map))
|
||||
and (
|
||||
(
|
||||
new_status.in_cleaning
|
||||
and (dt_util.utcnow() - current_map.last_updated)
|
||||
> IMAGE_CACHE_INTERVAL
|
||||
)
|
||||
or self.last_update_state != new_status.state_name
|
||||
)
|
||||
):
|
||||
new_status.in_cleaning
|
||||
and (dt_util.utcnow() - self._last_home_update_attempt)
|
||||
> IMAGE_CACHE_INTERVAL
|
||||
) or self.last_update_state != new_status.state_name:
|
||||
self._last_home_update_attempt = dt_util.utcnow()
|
||||
try:
|
||||
await self.update_map()
|
||||
except HomeAssistantError as err:
|
||||
_LOGGER.debug("Failed to update map: %s", err)
|
||||
await self.set_current_map_rooms()
|
||||
except RoborockException as ex:
|
||||
_LOGGER.debug("Failed to update data: %s", ex)
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_data_fail",
|
||||
) from ex
|
||||
if self.roborock_device_info.props.status.in_cleaning:
|
||||
if self._is_cloud_api:
|
||||
self.update_interval = V1_CLOUD_IN_CLEANING_INTERVAL
|
||||
else:
|
||||
|
||||
if self.properties_api.status.in_cleaning:
|
||||
if self._device.is_local_connected:
|
||||
self.update_interval = V1_LOCAL_IN_CLEANING_INTERVAL
|
||||
elif self._is_cloud_api:
|
||||
self.update_interval = V1_CLOUD_NOT_CLEANING_INTERVAL
|
||||
else:
|
||||
else:
|
||||
self.update_interval = V1_CLOUD_IN_CLEANING_INTERVAL
|
||||
elif self._device.is_local_connected:
|
||||
self.update_interval = V1_LOCAL_NOT_CLEANING_INTERVAL
|
||||
self.last_update_state = self.roborock_device_info.props.status.state_name
|
||||
return self.roborock_device_info.props
|
||||
|
||||
def _set_current_map(self) -> None:
|
||||
if (
|
||||
self.roborock_device_info.props.status is not None
|
||||
and self.roborock_device_info.props.status.current_map is not None
|
||||
):
|
||||
self.current_map = self.roborock_device_info.props.status.current_map
|
||||
|
||||
async def set_current_map_rooms(self) -> None:
|
||||
"""Fetch all of the rooms for the current map and set on RoborockMapInfo."""
|
||||
# The api is only able to access rooms for the currently selected map
|
||||
# So it is important this is only called when you have the map you care
|
||||
# about selected.
|
||||
if self.current_map is None or self.current_map not in self.maps:
|
||||
return
|
||||
room_mapping = await self.api.get_room_mapping()
|
||||
self.maps[self.current_map].rooms = {
|
||||
room.segment_id: self._home_data_rooms.get(room.iot_id, "Unknown")
|
||||
for room in room_mapping or ()
|
||||
}
|
||||
else:
|
||||
self.update_interval = V1_CLOUD_NOT_CLEANING_INTERVAL
|
||||
self.last_update_state = self.properties_api.status.state_name
|
||||
return DeviceState(
|
||||
status=self.properties_api.status,
|
||||
dnd_timer=self.properties_api.dnd,
|
||||
consumable=self.properties_api.consumables,
|
||||
clean_summary=self.properties_api.clean_summary,
|
||||
)
|
||||
|
||||
async def get_routines(self) -> list[HomeDataScene]:
|
||||
"""Get routines."""
|
||||
try:
|
||||
return await self._api_client.get_scenes(self._user_data, self.duid)
|
||||
return await self.properties_api.routines.get_routines()
|
||||
except RoborockException as err:
|
||||
_LOGGER.error("Failed to get routines %s", err)
|
||||
raise HomeAssistantError(
|
||||
@@ -406,7 +272,7 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
async def execute_routines(self, routine_id: int) -> None:
|
||||
"""Execute routines."""
|
||||
try:
|
||||
await self._api_client.execute_scene(self._user_data, routine_id)
|
||||
await self.properties_api.routines.execute_routine(routine_id)
|
||||
except RoborockException as err:
|
||||
_LOGGER.error("Failed to execute routines %s %s", routine_id, err)
|
||||
raise HomeAssistantError(
|
||||
@@ -420,85 +286,43 @@ class RoborockDataUpdateCoordinator(DataUpdateCoordinator[DeviceProp]):
|
||||
@cached_property
|
||||
def duid(self) -> str:
|
||||
"""Get the unique id of the device as specified by Roborock."""
|
||||
return self.roborock_device_info.device.duid
|
||||
return self._device.duid
|
||||
|
||||
@cached_property
|
||||
def duid_slug(self) -> str:
|
||||
"""Get the slug of the duid."""
|
||||
return slugify(self.duid)
|
||||
|
||||
async def refresh_coordinator_map(self) -> None:
|
||||
"""Get the starting map information for all maps for this device.
|
||||
@property
|
||||
def device(self) -> RoborockDevice:
|
||||
"""Get the RoborockDevice."""
|
||||
return self._device
|
||||
|
||||
The following steps must be done synchronously.
|
||||
Only one map can be loaded at a time per device.
|
||||
"""
|
||||
cur_map = self.current_map
|
||||
# This won't be None at this point as the coordinator will have run first.
|
||||
if cur_map is None:
|
||||
# If we don't have a cur map(shouldn't happen) just
|
||||
# return as we can't do anything.
|
||||
return
|
||||
if self.data.status.in_cleaning:
|
||||
# If the vacuum is cleaning, we cannot change maps
|
||||
# as it will interrupt the cleaning.
|
||||
_LOGGER.info(
|
||||
"Vacuum is cleaning, not switching to other maps to fetch rooms"
|
||||
|
||||
async def _refresh_traits(traits: list[Any]) -> None:
|
||||
"""Refresh a list of traits serially.
|
||||
|
||||
We refresh traits serially to avoid overloading the cloud servers or device
|
||||
with requests. If any single trait fails to refresh, we stop the whole
|
||||
update process and raise UpdateFailed.
|
||||
"""
|
||||
for trait in traits:
|
||||
try:
|
||||
await trait.refresh()
|
||||
except RoborockException as ex:
|
||||
_LOGGER.debug(
|
||||
"Failed to update data (%s): %s", trait.__class__.__name__, ex
|
||||
)
|
||||
# Since this is hitting the cloud api, we want to be careful and will just
|
||||
# stop here rather than retrying in the future.
|
||||
map_flags = [cur_map]
|
||||
else:
|
||||
map_flags = sorted(
|
||||
self.maps, key=lambda data: data == cur_map, reverse=True
|
||||
)
|
||||
for map_flag in map_flags:
|
||||
if map_flag != cur_map:
|
||||
# Only change the map and sleep if we have multiple maps.
|
||||
try:
|
||||
await self.cloud_api.load_multi_map(map_flag)
|
||||
except RoborockException as ex:
|
||||
_LOGGER.debug(
|
||||
"Failed to change to map %s when refreshing maps: %s",
|
||||
map_flag,
|
||||
ex,
|
||||
)
|
||||
continue
|
||||
else:
|
||||
self.current_map = map_flag
|
||||
# We cannot get the map until the roborock servers fully process the
|
||||
# map change. If the above command fails, we should still sleep, just
|
||||
# in case it executes delayed.
|
||||
await asyncio.sleep(MAP_SLEEP)
|
||||
tasks = [self.set_current_map_rooms()]
|
||||
# The image is set within async_setup, so if it exists, we have it here.
|
||||
if self.maps[map_flag].image is None:
|
||||
# If we don't have a cached map, let's update it here so that it can be
|
||||
# cached in the future.
|
||||
tasks.append(self.update_map())
|
||||
# If either of these fail, we don't care, and we want to continue.
|
||||
await asyncio.gather(*tasks, return_exceptions=True)
|
||||
|
||||
if len(self.maps) > 1 and not self.data.status.in_cleaning:
|
||||
# Set the map back to the map the user previously had selected so that it
|
||||
# does not change the end user's app.
|
||||
# Only needs to happen when we changed maps above.
|
||||
try:
|
||||
await self.cloud_api.load_multi_map(cur_map)
|
||||
except RoborockException as ex:
|
||||
_LOGGER.warning(
|
||||
"Failed to change back to map %s when refreshing maps: %s",
|
||||
cur_map,
|
||||
ex,
|
||||
)
|
||||
self.current_map = cur_map
|
||||
raise UpdateFailed(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="update_data_fail",
|
||||
) from ex
|
||||
|
||||
|
||||
class RoborockDataUpdateCoordinatorA01(
|
||||
DataUpdateCoordinator[
|
||||
dict[RoborockDyadDataProtocol | RoborockZeoProtocol, StateType]
|
||||
]
|
||||
):
|
||||
_V = TypeVar("_V", bound=RoborockDyadDataProtocol | RoborockZeoProtocol)
|
||||
|
||||
|
||||
class RoborockDataUpdateCoordinatorA01(DataUpdateCoordinator[dict[_V, StateType]]):
|
||||
"""Class to manage fetching data from the API for A01 devices."""
|
||||
|
||||
config_entry: RoborockConfigEntry
|
||||
@@ -507,9 +331,7 @@ class RoborockDataUpdateCoordinatorA01(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: RoborockConfigEntry,
|
||||
device: HomeDataDevice,
|
||||
product_info: HomeDataProduct,
|
||||
api: RoborockClientA01,
|
||||
device: RoborockDevice,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(
|
||||
@@ -519,53 +341,88 @@ class RoborockDataUpdateCoordinatorA01(
|
||||
name=DOMAIN,
|
||||
update_interval=A01_UPDATE_INTERVAL,
|
||||
)
|
||||
self.api = api
|
||||
self._device = device
|
||||
self.device_info = DeviceInfo(
|
||||
name=device.name,
|
||||
identifiers={(DOMAIN, device.duid)},
|
||||
manufacturer="Roborock",
|
||||
model=product_info.model,
|
||||
sw_version=device.fv,
|
||||
model=device.product.model,
|
||||
sw_version=device.device_info.fv,
|
||||
)
|
||||
self.request_protocols: list[
|
||||
RoborockDyadDataProtocol | RoborockZeoProtocol
|
||||
] = []
|
||||
if product_info.category == RoborockCategory.WET_DRY_VAC:
|
||||
self.request_protocols = [
|
||||
RoborockDyadDataProtocol.STATUS,
|
||||
RoborockDyadDataProtocol.POWER,
|
||||
RoborockDyadDataProtocol.MESH_LEFT,
|
||||
RoborockDyadDataProtocol.BRUSH_LEFT,
|
||||
RoborockDyadDataProtocol.ERROR,
|
||||
RoborockDyadDataProtocol.TOTAL_RUN_TIME,
|
||||
]
|
||||
elif product_info.category == RoborockCategory.WASHING_MACHINE:
|
||||
self.request_protocols = [
|
||||
RoborockZeoProtocol.STATE,
|
||||
RoborockZeoProtocol.COUNTDOWN,
|
||||
RoborockZeoProtocol.WASHING_LEFT,
|
||||
RoborockZeoProtocol.ERROR,
|
||||
]
|
||||
else:
|
||||
_LOGGER.warning("The device you added is not yet supported")
|
||||
self.roborock_device_info = RoborockA01HassDeviceInfo(device, product_info)
|
||||
|
||||
async def _async_update_data(
|
||||
self,
|
||||
) -> dict[RoborockDyadDataProtocol | RoborockZeoProtocol, StateType]:
|
||||
return await self.api.update_values(self.request_protocols)
|
||||
|
||||
async def async_shutdown(self) -> None:
|
||||
"""Shutdown the coordinator on config entry unload."""
|
||||
await super().async_shutdown()
|
||||
await self.api.async_release()
|
||||
self.request_protocols: list[_V] = []
|
||||
|
||||
@cached_property
|
||||
def duid(self) -> str:
|
||||
"""Get the unique id of the device as specified by Roborock."""
|
||||
return self.roborock_device_info.device.duid
|
||||
return self._device.duid
|
||||
|
||||
@cached_property
|
||||
def duid_slug(self) -> str:
|
||||
"""Get the slug of the duid."""
|
||||
return slugify(self.duid)
|
||||
|
||||
@property
|
||||
def device(self) -> RoborockDevice:
|
||||
"""Get the RoborockDevice."""
|
||||
return self._device
|
||||
|
||||
|
||||
class RoborockWashingMachineUpdateCoordinator(
|
||||
RoborockDataUpdateCoordinatorA01[RoborockZeoProtocol]
|
||||
):
|
||||
"""Coordinator for Zeo devices."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: RoborockConfigEntry,
|
||||
device: RoborockDevice,
|
||||
api: ZeoApi,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(hass, config_entry, device)
|
||||
self.api = api
|
||||
self.request_protocols: list[RoborockZeoProtocol] = []
|
||||
# This currently only supports the washing machine protocols
|
||||
self.request_protocols = [
|
||||
RoborockZeoProtocol.STATE,
|
||||
RoborockZeoProtocol.COUNTDOWN,
|
||||
RoborockZeoProtocol.WASHING_LEFT,
|
||||
RoborockZeoProtocol.ERROR,
|
||||
]
|
||||
|
||||
async def _async_update_data(
|
||||
self,
|
||||
) -> dict[RoborockZeoProtocol, StateType]:
|
||||
return await self.api.query_values(self.request_protocols)
|
||||
|
||||
|
||||
class RoborockWetDryVacUpdateCoordinator(
|
||||
RoborockDataUpdateCoordinatorA01[RoborockDyadDataProtocol]
|
||||
):
|
||||
"""Coordinator for Dyad devices."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
config_entry: RoborockConfigEntry,
|
||||
device: RoborockDevice,
|
||||
api: DyadApi,
|
||||
) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(hass, config_entry, device)
|
||||
self.api = api
|
||||
# This currenltly only supports the WetDryVac protocols
|
||||
self.request_protocols: list[RoborockDyadDataProtocol] = [
|
||||
RoborockDyadDataProtocol.STATUS,
|
||||
RoborockDyadDataProtocol.POWER,
|
||||
RoborockDyadDataProtocol.MESH_LEFT,
|
||||
RoborockDyadDataProtocol.BRUSH_LEFT,
|
||||
RoborockDyadDataProtocol.ERROR,
|
||||
RoborockDyadDataProtocol.TOTAL_RUN_TIME,
|
||||
]
|
||||
|
||||
async def _async_update_data(
|
||||
self,
|
||||
) -> dict[RoborockDyadDataProtocol, StateType]:
|
||||
return await self.api.query_values(self.request_protocols)
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from homeassistant.components.diagnostics import async_redact_data
|
||||
@@ -10,9 +11,9 @@ from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import RoborockConfigEntry
|
||||
|
||||
TO_REDACT_CONFIG = ["token", "sn", "rruid", CONF_UNIQUE_ID, "username", "uid"]
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
TO_REDACT_COORD = ["duid", "localKey", "mac", "bssid"]
|
||||
TO_REDACT_CONFIG = ["token", "sn", "rruid", CONF_UNIQUE_ID, "username", "uid"]
|
||||
|
||||
|
||||
async def async_get_config_entry_diagnostics(
|
||||
@@ -24,12 +25,7 @@ async def async_get_config_entry_diagnostics(
|
||||
return {
|
||||
"config_entry": async_redact_data(config_entry.data, TO_REDACT_CONFIG),
|
||||
"coordinators": {
|
||||
f"**REDACTED-{i}**": {
|
||||
"roborock_device_info": async_redact_data(
|
||||
coordinator.roborock_device_info.as_dict(), TO_REDACT_COORD
|
||||
),
|
||||
"api": coordinator.api.diagnostic_data,
|
||||
}
|
||||
f"**REDACTED-{i}**": coordinator.device.diagnostic_data()
|
||||
for i, coordinator in enumerate(coordinators.values())
|
||||
},
|
||||
}
|
||||
|
||||
@@ -2,19 +2,10 @@
|
||||
|
||||
from typing import Any
|
||||
|
||||
from roborock.api import RoborockClient
|
||||
from roborock.command_cache import CacheableAttribute
|
||||
from roborock.data import Consumable, Status
|
||||
from roborock.data import Status
|
||||
from roborock.devices.traits.v1.command import CommandTrait
|
||||
from roborock.exceptions import RoborockException
|
||||
from roborock.roborock_message import RoborockDataProtocol
|
||||
from roborock.roborock_typing import RoborockCommand
|
||||
from roborock.version_1_apis.roborock_client_v1 import (
|
||||
CLOUD_REQUIRED,
|
||||
AttributeCache,
|
||||
RoborockClientV1,
|
||||
)
|
||||
from roborock.version_1_apis.roborock_mqtt_client_v1 import RoborockMqttClientV1
|
||||
from roborock.version_a01_apis import RoborockClientA01
|
||||
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
@@ -34,39 +25,30 @@ class RoborockEntity(Entity):
|
||||
self,
|
||||
unique_id: str,
|
||||
device_info: DeviceInfo,
|
||||
api: RoborockClient,
|
||||
) -> None:
|
||||
"""Initialize the Roborock Device."""
|
||||
self._attr_unique_id = unique_id
|
||||
self._attr_device_info = device_info
|
||||
self._api = api
|
||||
|
||||
|
||||
class RoborockEntityV1(RoborockEntity):
|
||||
"""Representation of a base Roborock V1 Entity."""
|
||||
|
||||
_api: RoborockClientV1
|
||||
|
||||
def __init__(
|
||||
self, unique_id: str, device_info: DeviceInfo, api: RoborockClientV1
|
||||
self, unique_id: str, device_info: DeviceInfo, api: CommandTrait
|
||||
) -> None:
|
||||
"""Initialize the Roborock Device."""
|
||||
super().__init__(unique_id, device_info, api)
|
||||
super().__init__(unique_id, device_info)
|
||||
self._api = api
|
||||
|
||||
def get_cache(self, attribute: CacheableAttribute) -> AttributeCache:
|
||||
"""Get an item from the api cache."""
|
||||
return self._api.cache[attribute]
|
||||
|
||||
@classmethod
|
||||
async def _send_command(
|
||||
cls,
|
||||
async def send(
|
||||
self,
|
||||
command: RoborockCommand | str,
|
||||
api: RoborockClientV1,
|
||||
params: dict[str, Any] | list[Any] | int | None = None,
|
||||
) -> dict:
|
||||
"""Send a Roborock command with params to a given api."""
|
||||
try:
|
||||
response: dict = await api.send_command(command, params)
|
||||
response: dict = await self._api.send(command, params=params)
|
||||
except RoborockException as err:
|
||||
if isinstance(command, RoborockCommand):
|
||||
command_name = command.name
|
||||
@@ -81,31 +63,6 @@ class RoborockEntityV1(RoborockEntity):
|
||||
) from err
|
||||
return response
|
||||
|
||||
async def send(
|
||||
self,
|
||||
command: RoborockCommand | str,
|
||||
params: dict[str, Any] | list[Any] | int | None = None,
|
||||
) -> dict:
|
||||
"""Send a command to a vacuum cleaner."""
|
||||
return await self._send_command(command, self._api, params)
|
||||
|
||||
@property
|
||||
def api(self) -> RoborockClientV1:
|
||||
"""Returns the api."""
|
||||
return self._api
|
||||
|
||||
|
||||
class RoborockEntityA01(RoborockEntity):
|
||||
"""Representation of a base Roborock Entity for A01 devices."""
|
||||
|
||||
_api: RoborockClientA01
|
||||
|
||||
def __init__(
|
||||
self, unique_id: str, device_info: DeviceInfo, api: RoborockClientA01
|
||||
) -> None:
|
||||
"""Initialize the Roborock Device."""
|
||||
super().__init__(unique_id, device_info, api)
|
||||
|
||||
|
||||
class RoborockCoordinatedEntityV1(
|
||||
RoborockEntityV1, CoordinatorEntity[RoborockDataUpdateCoordinator]
|
||||
@@ -118,9 +75,6 @@ class RoborockCoordinatedEntityV1(
|
||||
self,
|
||||
unique_id: str,
|
||||
coordinator: RoborockDataUpdateCoordinator,
|
||||
listener_request: list[RoborockDataProtocol]
|
||||
| RoborockDataProtocol
|
||||
| None = None,
|
||||
is_dock_entity: bool = False,
|
||||
) -> None:
|
||||
"""Initialize the coordinated Roborock Device."""
|
||||
@@ -130,27 +84,10 @@ class RoborockCoordinatedEntityV1(
|
||||
device_info=coordinator.device_info
|
||||
if not is_dock_entity
|
||||
else coordinator.dock_device_info,
|
||||
api=coordinator.api,
|
||||
api=coordinator.properties_api.command,
|
||||
)
|
||||
CoordinatorEntity.__init__(self, coordinator=coordinator)
|
||||
self._attr_unique_id = unique_id
|
||||
if isinstance(listener_request, RoborockDataProtocol):
|
||||
listener_request = [listener_request]
|
||||
self.listener_requests = listener_request or []
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Add listeners when the device is added to hass."""
|
||||
await super().async_added_to_hass()
|
||||
for listener_request in self.listener_requests:
|
||||
self.api.add_listener(
|
||||
listener_request, self._update_from_listener, cache=self.api.cache
|
||||
)
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""Remove listeners when the device is removed from hass."""
|
||||
for listener_request in self.listener_requests:
|
||||
self.api.remove_listener(listener_request, self._update_from_listener)
|
||||
await super().async_will_remove_from_hass()
|
||||
|
||||
@property
|
||||
def _device_status(self) -> Status:
|
||||
@@ -158,36 +95,19 @@ class RoborockCoordinatedEntityV1(
|
||||
data = self.coordinator.data
|
||||
return data.status
|
||||
|
||||
@property
|
||||
def cloud_api(self) -> RoborockMqttClientV1:
|
||||
"""Return the cloud api."""
|
||||
return self.coordinator.cloud_api
|
||||
|
||||
async def send(
|
||||
self,
|
||||
command: RoborockCommand | str,
|
||||
params: dict[str, Any] | list[Any] | int | None = None,
|
||||
) -> dict:
|
||||
"""Overloads normal send command but refreshes coordinator."""
|
||||
if command in CLOUD_REQUIRED:
|
||||
res = await self._send_command(command, self.coordinator.cloud_api, params)
|
||||
else:
|
||||
res = await self._send_command(command, self._api, params)
|
||||
res = await super().send(command, params)
|
||||
await self.coordinator.async_refresh()
|
||||
return res
|
||||
|
||||
def _update_from_listener(self, value: Status | Consumable) -> None:
|
||||
"""Update the status or consumable data from a listener and then write the new entity state."""
|
||||
if isinstance(value, Status):
|
||||
self.coordinator.roborock_device_info.props.status = value
|
||||
else:
|
||||
self.coordinator.roborock_device_info.props.consumable = value
|
||||
self.coordinator.data = self.coordinator.roborock_device_info.props
|
||||
self.schedule_update_ha_state()
|
||||
|
||||
|
||||
class RoborockCoordinatedEntityA01(
|
||||
RoborockEntityA01, CoordinatorEntity[RoborockDataUpdateCoordinatorA01]
|
||||
RoborockEntity, CoordinatorEntity[RoborockDataUpdateCoordinatorA01]
|
||||
):
|
||||
"""Representation of a base a coordinated Roborock Entity."""
|
||||
|
||||
@@ -197,11 +117,10 @@ class RoborockCoordinatedEntityA01(
|
||||
coordinator: RoborockDataUpdateCoordinatorA01,
|
||||
) -> None:
|
||||
"""Initialize the coordinated Roborock Device."""
|
||||
RoborockEntityA01.__init__(
|
||||
RoborockEntity.__init__(
|
||||
self,
|
||||
unique_id=unique_id,
|
||||
device_info=coordinator.device_info,
|
||||
api=coordinator.api,
|
||||
)
|
||||
CoordinatorEntity.__init__(self, coordinator=coordinator)
|
||||
self._attr_unique_id = unique_id
|
||||
|
||||
@@ -3,10 +3,14 @@
|
||||
from datetime import datetime
|
||||
import logging
|
||||
|
||||
from roborock.devices.traits.v1.home import HomeTrait
|
||||
from roborock.devices.traits.v1.map_content import MapContent
|
||||
|
||||
from homeassistant.components.image import ImageEntity
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import EntityCategory
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import RoborockConfigEntry, RoborockDataUpdateCoordinator
|
||||
@@ -28,13 +32,14 @@ async def async_setup_entry(
|
||||
(
|
||||
RoborockMap(
|
||||
config_entry,
|
||||
f"{coord.duid_slug}_map_{map_info.name}",
|
||||
coord,
|
||||
map_info.flag,
|
||||
coord.properties_api.home,
|
||||
map_info.map_flag,
|
||||
map_info.name,
|
||||
)
|
||||
for coord in config_entry.runtime_data.v1
|
||||
for map_info in coord.maps.values()
|
||||
if coord.properties_api.home is not None
|
||||
for map_info in (coord.properties_api.home.home_map_info or {}).values()
|
||||
),
|
||||
)
|
||||
|
||||
@@ -49,41 +54,55 @@ class RoborockMap(RoborockCoordinatedEntityV1, ImageEntity):
|
||||
def __init__(
|
||||
self,
|
||||
config_entry: ConfigEntry,
|
||||
unique_id: str,
|
||||
coordinator: RoborockDataUpdateCoordinator,
|
||||
home_trait: HomeTrait,
|
||||
map_flag: int,
|
||||
map_name: str,
|
||||
) -> None:
|
||||
"""Initialize a Roborock map."""
|
||||
map_name = map_name or f"Map {map_flag}"
|
||||
# Note: Map names are not a valid unique id since they can be changed
|
||||
# in the roborock app. This should be migrated to use map flag for
|
||||
# the unique id.
|
||||
unique_id = f"{coordinator.duid_slug}_map_{map_name}"
|
||||
RoborockCoordinatedEntityV1.__init__(self, unique_id, coordinator)
|
||||
ImageEntity.__init__(self, coordinator.hass)
|
||||
self.config_entry = config_entry
|
||||
self._attr_name = map_name
|
||||
self._home_trait = home_trait
|
||||
self.map_flag = map_flag
|
||||
self.cached_map = b""
|
||||
self.cached_map: bytes | None = None
|
||||
self._attr_entity_category = EntityCategory.DIAGNOSTIC
|
||||
|
||||
@property
|
||||
def is_selected(self) -> bool:
|
||||
"""Return if this map is the currently selected map."""
|
||||
return self.map_flag == self.coordinator.current_map
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""When entity is added to hass load any previously cached maps from disk."""
|
||||
await super().async_added_to_hass()
|
||||
self._attr_image_last_updated = self.coordinator.maps[
|
||||
self.map_flag
|
||||
].last_updated
|
||||
self._attr_image_last_updated = self.coordinator.last_home_update
|
||||
self.async_write_ha_state()
|
||||
|
||||
@property
|
||||
def _map_content(self) -> MapContent | None:
|
||||
if self._home_trait.home_map_content and (
|
||||
map_content := self._home_trait.home_map_content.get(self.map_flag)
|
||||
):
|
||||
return map_content
|
||||
return None
|
||||
|
||||
def _handle_coordinator_update(self) -> None:
|
||||
# If the coordinator has updated the map, we can update the image.
|
||||
self._attr_image_last_updated = self.coordinator.maps[
|
||||
self.map_flag
|
||||
].last_updated
|
||||
"""Handle updated data from the coordinator.
|
||||
|
||||
If the coordinator has updated the map, we can update the image.
|
||||
"""
|
||||
if (map_content := self._map_content) is None:
|
||||
return
|
||||
if self.cached_map != map_content.image_content:
|
||||
self.cached_map = map_content.image_content
|
||||
self._attr_image_last_updated = self.coordinator.last_home_update
|
||||
|
||||
super()._handle_coordinator_update()
|
||||
|
||||
async def async_image(self) -> bytes | None:
|
||||
"""Get the cached image."""
|
||||
return self.coordinator.maps[self.map_flag].image
|
||||
if (map_content := self._map_content) is None:
|
||||
raise HomeAssistantError("Map flag not found in coordinator maps")
|
||||
return map_content.image_content
|
||||
|
||||
@@ -19,7 +19,7 @@
|
||||
"loggers": ["roborock"],
|
||||
"quality_scale": "silver",
|
||||
"requirements": [
|
||||
"python-roborock==3.7.1",
|
||||
"python-roborock==3.8.4",
|
||||
"vacuum-map-parser-roborock==0.1.4"
|
||||
]
|
||||
}
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user