mirror of
https://github.com/home-assistant/core.git
synced 2025-11-26 19:18:05 +00:00
Compare commits
99 Commits
tibber_dat
...
labs_helpe
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
35a6cce431 | ||
|
|
c41493860d | ||
|
|
f5b8ede5f9 | ||
|
|
474a60511b | ||
|
|
9657f3f832 | ||
|
|
59f4bc1908 | ||
|
|
9ebc6cbb23 | ||
|
|
f433ca7455 | ||
|
|
f77629d0f8 | ||
|
|
0ac1b22e03 | ||
|
|
1069233851 | ||
|
|
d2fd200469 | ||
|
|
20cdd9386e | ||
|
|
1be2e4f90c | ||
|
|
839f647396 | ||
|
|
7c2741bd36 | ||
|
|
d6fb268119 | ||
|
|
521a6784b4 | ||
|
|
d2ba7e8e3e | ||
|
|
405c2f96fd | ||
|
|
90ef5b1c25 | ||
|
|
562f72f321 | ||
|
|
f5ee3bd872 | ||
|
|
8dd35cb129 | ||
|
|
6fa971d393 | ||
|
|
6deff1c78f | ||
|
|
f96996b27f | ||
|
|
eb9fc66ca9 | ||
|
|
43e4fe4526 | ||
|
|
252dbb706f | ||
|
|
d7ad0cba94 | ||
|
|
159a8d39d6 | ||
|
|
8f1abb6dbb | ||
|
|
242c02890f | ||
|
|
eb793a3942 | ||
|
|
e65c47ba0f | ||
|
|
24dba24571 | ||
|
|
4c04dc00dd | ||
|
|
0c366506c5 | ||
|
|
a0323e80f5 | ||
|
|
e496fb2227 | ||
|
|
c2219aadb1 | ||
|
|
2cd0637324 | ||
|
|
293f8f7c87 | ||
|
|
1af569ae17 | ||
|
|
d4db5ec0cc | ||
|
|
4be1fa9a3a | ||
|
|
149c1e6772 | ||
|
|
e37e7574a4 | ||
|
|
37152a27ba | ||
|
|
5025af8334 | ||
|
|
ec9fb9837a | ||
|
|
30451e3aaa | ||
|
|
c66c3497c1 | ||
|
|
1063e71318 | ||
|
|
1a875b021a | ||
|
|
15328a4aff | ||
|
|
083cfb89af | ||
|
|
bd129c2085 | ||
|
|
f73bc9242b | ||
|
|
4506be5065 | ||
|
|
80c611e562 | ||
|
|
b44aafc294 | ||
|
|
af1e3205b8 | ||
|
|
1360fe7f23 | ||
|
|
b5bb8583f8 | ||
|
|
9b62c212ce | ||
|
|
8fa56ad92e | ||
|
|
f82f0a1862 | ||
|
|
878881b100 | ||
|
|
743583d9bd | ||
|
|
f537204d22 | ||
|
|
ec74be7922 | ||
|
|
3574f647d0 | ||
|
|
6c4296a0de | ||
|
|
e780e3db8c | ||
|
|
4ed2efa4e8 | ||
|
|
abef6f7b3e | ||
|
|
5556fb99e6 | ||
|
|
16669e39bd | ||
|
|
ca088d81c3 | ||
|
|
12847fb0a4 | ||
|
|
8b758c46f4 | ||
|
|
f439471dc1 | ||
|
|
5ff3233b09 | ||
|
|
22daed083f | ||
|
|
13384de464 | ||
|
|
f5e5183190 | ||
|
|
e18668b8f9 | ||
|
|
15647f2720 | ||
|
|
c961126ee5 | ||
|
|
5142c5f418 | ||
|
|
3d459704e1 | ||
|
|
5a8ddcd0b3 | ||
|
|
2667a40b92 | ||
|
|
08baa99691 | ||
|
|
d84cf26f40 | ||
|
|
ba5472da90 | ||
|
|
e20b88a54f |
80
.github/workflows/builder.yml
vendored
80
.github/workflows/builder.yml
vendored
@@ -14,6 +14,7 @@ env:
|
||||
PIP_TIMEOUT: 60
|
||||
UV_HTTP_TIMEOUT: 60
|
||||
UV_SYSTEM_PYTHON: "true"
|
||||
BASE_IMAGE_VERSION: "2025.11.0"
|
||||
|
||||
jobs:
|
||||
init:
|
||||
@@ -21,18 +22,15 @@ jobs:
|
||||
if: github.repository_owner == 'home-assistant'
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
architectures: ${{ steps.info.outputs.architectures }}
|
||||
version: ${{ steps.version.outputs.version }}
|
||||
channel: ${{ steps.version.outputs.channel }}
|
||||
publish: ${{ steps.version.outputs.publish }}
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
with:
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
@@ -79,7 +77,7 @@ jobs:
|
||||
name: Build ${{ matrix.arch }} base core image
|
||||
if: github.repository_owner == 'home-assistant'
|
||||
needs: init
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: ${{ matrix.os }}
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
@@ -87,7 +85,12 @@ jobs:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
arch: ["amd64", "aarch64"]
|
||||
include:
|
||||
- arch: amd64
|
||||
os: ubuntu-latest
|
||||
- arch: aarch64
|
||||
os: ubuntu-24.04-arm
|
||||
steps:
|
||||
- name: Checkout the repository
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
@@ -116,7 +119,7 @@ jobs:
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
if: needs.init.outputs.channel == 'dev'
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
@@ -184,16 +187,59 @@ jobs:
|
||||
username: ${{ github.repository_owner }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
# home-assistant/builder doesn't support sha pinning
|
||||
- name: Build base image
|
||||
uses: home-assistant/builder@2025.09.0
|
||||
- name: Install Cosign
|
||||
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
|
||||
with:
|
||||
args: |
|
||||
$BUILD_ARGS \
|
||||
--${{ matrix.arch }} \
|
||||
--cosign \
|
||||
--target /data \
|
||||
--generic ${{ needs.init.outputs.version }}
|
||||
cosign-release: "v2.5.3"
|
||||
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
|
||||
|
||||
- name: Build variables
|
||||
id: vars
|
||||
shell: bash
|
||||
run: |
|
||||
echo "base_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant-base:${{ env.BASE_IMAGE_VERSION }}" >> "$GITHUB_OUTPUT"
|
||||
echo "cache_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:latest" >> "$GITHUB_OUTPUT"
|
||||
echo "created=$(date --rfc-3339=seconds --utc)" >> "$GITHUB_OUTPUT"
|
||||
|
||||
- name: Verify base image signature
|
||||
run: |
|
||||
cosign verify \
|
||||
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
|
||||
--certificate-identity-regexp "https://github.com/home-assistant/docker/.*" \
|
||||
"${{ steps.vars.outputs.base_image }}"
|
||||
|
||||
- name: Verify cache image signature
|
||||
id: cache
|
||||
continue-on-error: true
|
||||
run: |
|
||||
cosign verify \
|
||||
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
|
||||
--certificate-identity-regexp "https://github.com/home-assistant/core/.*" \
|
||||
"${{ steps.vars.outputs.cache_image }}"
|
||||
|
||||
- name: Build base image
|
||||
id: build
|
||||
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
|
||||
with:
|
||||
context: .
|
||||
file: ./Dockerfile
|
||||
platforms: ${{ steps.vars.outputs.platform }}
|
||||
push: true
|
||||
cache-from: ${{ steps.cache.outcome == 'success' && steps.vars.outputs.cache_image || '' }}
|
||||
build-args: |
|
||||
BUILD_FROM=${{ steps.vars.outputs.base_image }}
|
||||
tags: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
|
||||
labels: |
|
||||
io.hass.arch=${{ matrix.arch }}
|
||||
io.hass.version=${{ needs.init.outputs.version }}
|
||||
org.opencontainers.image.created=${{ steps.vars.outputs.created }}
|
||||
org.opencontainers.image.version=${{ needs.init.outputs.version }}
|
||||
|
||||
- name: Sign image
|
||||
run: |
|
||||
cosign sign --yes "ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}@${{ steps.build.outputs.digest }}"
|
||||
|
||||
build_machine:
|
||||
name: Build ${{ matrix.machine }} machine core image
|
||||
@@ -417,7 +463,7 @@ jobs:
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
|
||||
2
.github/workflows/ci.yaml
vendored
2
.github/workflows/ci.yaml
vendored
@@ -257,7 +257,7 @@ jobs:
|
||||
- &setup-python-default
|
||||
name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: &actions-setup-python actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
|
||||
4
.github/workflows/codeql.yml
vendored
4
.github/workflows/codeql.yml
vendored
@@ -24,11 +24,11 @@ jobs:
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
|
||||
with:
|
||||
languages: python
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
|
||||
uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
|
||||
with:
|
||||
category: "/language:python"
|
||||
|
||||
2
.github/workflows/translations.yml
vendored
2
.github/workflows/translations.yml
vendored
@@ -22,7 +22,7 @@ jobs:
|
||||
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
|
||||
|
||||
17
.github/workflows/wheels.yml
vendored
17
.github/workflows/wheels.yml
vendored
@@ -28,8 +28,6 @@ jobs:
|
||||
name: Initialize wheels builder
|
||||
if: github.repository_owner == 'home-assistant'
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
architectures: ${{ steps.info.outputs.architectures }}
|
||||
steps:
|
||||
- &checkout
|
||||
name: Checkout the repository
|
||||
@@ -37,7 +35,7 @@ jobs:
|
||||
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
|
||||
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
check-latest: true
|
||||
@@ -50,10 +48,6 @@ jobs:
|
||||
pip install "$(grep '^uv' < requirements.txt)"
|
||||
uv pip install -r requirements.txt
|
||||
|
||||
- name: Get information
|
||||
id: info
|
||||
uses: home-assistant/actions/helpers/info@master
|
||||
|
||||
- name: Create requirements_diff file
|
||||
run: |
|
||||
if [[ ${{ github.event_name }} =~ (schedule|workflow_dispatch) ]]; then
|
||||
@@ -114,9 +108,10 @@ jobs:
|
||||
fail-fast: false
|
||||
matrix: &matrix-build
|
||||
abi: ["cp313", "cp314"]
|
||||
arch: ${{ fromJson(needs.init.outputs.architectures) }}
|
||||
arch: ["amd64", "aarch64"]
|
||||
include:
|
||||
- os: ubuntu-latest
|
||||
- arch: amd64
|
||||
os: ubuntu-latest
|
||||
- arch: aarch64
|
||||
os: ubuntu-24.04-arm
|
||||
steps:
|
||||
@@ -140,9 +135,8 @@ jobs:
|
||||
sed -i "/uv/d" requirements.txt
|
||||
sed -i "/uv/d" requirements_diff.txt
|
||||
|
||||
# home-assistant/wheels doesn't support sha pinning
|
||||
- name: Build wheels
|
||||
uses: &home-assistant-wheels home-assistant/wheels@2025.10.0
|
||||
uses: &home-assistant-wheels home-assistant/wheels@6066c17a2a4aafcf7bdfeae01717f63adfcdba98 # 2025.11.0
|
||||
with:
|
||||
abi: ${{ matrix.abi }}
|
||||
tag: musllinux_1_2
|
||||
@@ -183,7 +177,6 @@ jobs:
|
||||
sed -i "/uv/d" requirements.txt
|
||||
sed -i "/uv/d" requirements_diff.txt
|
||||
|
||||
# home-assistant/wheels doesn't support sha pinning
|
||||
- name: Build wheels
|
||||
uses: *home-assistant-wheels
|
||||
with:
|
||||
|
||||
@@ -94,7 +94,7 @@ repos:
|
||||
pass_filenames: false
|
||||
language: script
|
||||
types: [text]
|
||||
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
|
||||
files: ^(script/hassfest/(metadata|docker)\.py|homeassistant/const\.py$|pyproject\.toml)$
|
||||
- id: hassfest-mypy-config
|
||||
name: hassfest-mypy-config
|
||||
entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config
|
||||
|
||||
@@ -120,7 +120,6 @@ homeassistant.components.blueprint.*
|
||||
homeassistant.components.bluesound.*
|
||||
homeassistant.components.bluetooth.*
|
||||
homeassistant.components.bluetooth_adapters.*
|
||||
homeassistant.components.bluetooth_tracker.*
|
||||
homeassistant.components.bmw_connected_drive.*
|
||||
homeassistant.components.bond.*
|
||||
homeassistant.components.bosch_alarm.*
|
||||
|
||||
12
CODEOWNERS
generated
12
CODEOWNERS
generated
@@ -121,6 +121,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/androidtv/ @JeffLIrion @ollo69
|
||||
/homeassistant/components/androidtv_remote/ @tronikos @Drafteed
|
||||
/tests/components/androidtv_remote/ @tronikos @Drafteed
|
||||
/homeassistant/components/anglian_water/ @pantherale0
|
||||
/tests/components/anglian_water/ @pantherale0
|
||||
/homeassistant/components/anova/ @Lash-L
|
||||
/tests/components/anova/ @Lash-L
|
||||
/homeassistant/components/anthemav/ @hyralex
|
||||
@@ -183,8 +185,8 @@ build.json @home-assistant/supervisor
|
||||
/homeassistant/components/automation/ @home-assistant/core
|
||||
/tests/components/automation/ @home-assistant/core
|
||||
/homeassistant/components/avea/ @pattyland
|
||||
/homeassistant/components/awair/ @ahayworth @danielsjf
|
||||
/tests/components/awair/ @ahayworth @danielsjf
|
||||
/homeassistant/components/awair/ @ahayworth @ricohageman
|
||||
/tests/components/awair/ @ahayworth @ricohageman
|
||||
/homeassistant/components/aws_s3/ @tomasbedrich
|
||||
/tests/components/aws_s3/ @tomasbedrich
|
||||
/homeassistant/components/axis/ @Kane610
|
||||
@@ -472,6 +474,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/escea/ @lazdavila
|
||||
/homeassistant/components/esphome/ @jesserockz @kbx81 @bdraco
|
||||
/tests/components/esphome/ @jesserockz @kbx81 @bdraco
|
||||
/homeassistant/components/essent/ @jaapp
|
||||
/tests/components/essent/ @jaapp
|
||||
/homeassistant/components/eufylife_ble/ @bdr99
|
||||
/tests/components/eufylife_ble/ @bdr99
|
||||
/homeassistant/components/event/ @home-assistant/core
|
||||
@@ -591,6 +595,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/goodwe/ @mletenay @starkillerOG
|
||||
/homeassistant/components/google/ @allenporter
|
||||
/tests/components/google/ @allenporter
|
||||
/homeassistant/components/google_air_quality/ @Thomas55555
|
||||
/tests/components/google_air_quality/ @Thomas55555
|
||||
/homeassistant/components/google_assistant/ @home-assistant/cloud
|
||||
/tests/components/google_assistant/ @home-assistant/cloud
|
||||
/homeassistant/components/google_assistant_sdk/ @tronikos
|
||||
@@ -700,6 +706,8 @@ build.json @home-assistant/supervisor
|
||||
/tests/components/huawei_lte/ @scop @fphammerle
|
||||
/homeassistant/components/hue/ @marcelveldt
|
||||
/tests/components/hue/ @marcelveldt
|
||||
/homeassistant/components/hue_ble/ @flip-dots
|
||||
/tests/components/hue_ble/ @flip-dots
|
||||
/homeassistant/components/huisbaasje/ @dennisschroer
|
||||
/tests/components/huisbaasje/ @dennisschroer
|
||||
/homeassistant/components/humidifier/ @home-assistant/core @Shulyaka
|
||||
|
||||
33
Dockerfile
generated
33
Dockerfile
generated
@@ -4,32 +4,33 @@
|
||||
ARG BUILD_FROM
|
||||
FROM ${BUILD_FROM}
|
||||
|
||||
LABEL \
|
||||
io.hass.type="core" \
|
||||
org.opencontainers.image.authors="The Home Assistant Authors" \
|
||||
org.opencontainers.image.description="Open-source home automation platform running on Python 3" \
|
||||
org.opencontainers.image.documentation="https://www.home-assistant.io/docs/" \
|
||||
org.opencontainers.image.licenses="Apache-2.0" \
|
||||
org.opencontainers.image.source="https://github.com/home-assistant/core" \
|
||||
org.opencontainers.image.title="Home Assistant" \
|
||||
org.opencontainers.image.url="https://www.home-assistant.io/"
|
||||
|
||||
# Synchronize with homeassistant/core.py:async_stop
|
||||
ENV \
|
||||
S6_SERVICES_GRACETIME=240000 \
|
||||
UV_SYSTEM_PYTHON=true \
|
||||
UV_NO_CACHE=true
|
||||
|
||||
ARG QEMU_CPU
|
||||
|
||||
# Home Assistant S6-Overlay
|
||||
COPY rootfs /
|
||||
|
||||
# Needs to be redefined inside the FROM statement to be set for RUN commands
|
||||
ARG BUILD_ARCH
|
||||
# Get go2rtc binary
|
||||
RUN \
|
||||
case "${BUILD_ARCH}" in \
|
||||
"aarch64") go2rtc_suffix='arm64' ;; \
|
||||
*) go2rtc_suffix=${BUILD_ARCH} ;; \
|
||||
esac \
|
||||
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.12/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
|
||||
&& chmod +x /bin/go2rtc \
|
||||
# Verify go2rtc can be executed
|
||||
&& go2rtc --version
|
||||
# Add go2rtc binary
|
||||
COPY --from=ghcr.io/alexxit/go2rtc@sha256:baef0aa19d759fcfd31607b34ce8eaf039d496282bba57731e6ae326896d7640 /usr/local/bin/go2rtc /bin/go2rtc
|
||||
|
||||
# Install uv
|
||||
RUN pip3 install uv==0.9.6
|
||||
RUN \
|
||||
# Verify go2rtc can be executed
|
||||
go2rtc --version \
|
||||
# Install uv
|
||||
&& pip3 install uv==0.9.6
|
||||
|
||||
WORKDIR /usr/src
|
||||
|
||||
|
||||
16
build.yaml
16
build.yaml
@@ -1,16 +0,0 @@
|
||||
image: ghcr.io/home-assistant/{arch}-homeassistant
|
||||
build_from:
|
||||
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
|
||||
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
|
||||
cosign:
|
||||
base_identity: https://github.com/home-assistant/docker/.*
|
||||
identity: https://github.com/home-assistant/core/.*
|
||||
labels:
|
||||
io.hass.type: core
|
||||
org.opencontainers.image.title: Home Assistant
|
||||
org.opencontainers.image.description: Open-source home automation platform running on Python 3
|
||||
org.opencontainers.image.source: https://github.com/home-assistant/core
|
||||
org.opencontainers.image.authors: The Home Assistant Authors
|
||||
org.opencontainers.image.url: https://www.home-assistant.io/
|
||||
org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
|
||||
org.opencontainers.image.licenses: Apache-2.0
|
||||
@@ -2,6 +2,7 @@
|
||||
"domain": "google",
|
||||
"name": "Google",
|
||||
"integrations": [
|
||||
"google_air_quality",
|
||||
"google_assistant",
|
||||
"google_assistant_sdk",
|
||||
"google_cloud",
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"domain": "philips",
|
||||
"name": "Philips",
|
||||
"integrations": ["dynalite", "hue", "philips_js"]
|
||||
"integrations": ["dynalite", "hue", "hue_ble", "philips_js"]
|
||||
}
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"domain": "raspberry_pi",
|
||||
"name": "Raspberry Pi",
|
||||
"integrations": ["raspberry_pi", "rpi_camera", "rpi_power", "remote_rpi_gpio"]
|
||||
"integrations": ["raspberry_pi", "rpi_power", "remote_rpi_gpio"]
|
||||
}
|
||||
|
||||
@@ -75,9 +75,19 @@ class AirobotClimate(AirobotEntity, ClimateEntity):
|
||||
|
||||
@property
|
||||
def current_temperature(self) -> float | None:
|
||||
"""Return the current temperature."""
|
||||
"""Return the current temperature.
|
||||
|
||||
If floor temperature is available, thermostat is set up for floor heating.
|
||||
"""
|
||||
if self._status.temp_floor is not None:
|
||||
return self._status.temp_floor
|
||||
return self._status.temp_air
|
||||
|
||||
@property
|
||||
def current_humidity(self) -> float | None:
|
||||
"""Return the current humidity."""
|
||||
return self._status.hum_air
|
||||
|
||||
@property
|
||||
def target_temperature(self) -> float | None:
|
||||
"""Return the target temperature."""
|
||||
@@ -126,6 +136,13 @@ class AirobotClimate(AirobotEntity, ClimateEntity):
|
||||
|
||||
await self.coordinator.async_request_refresh()
|
||||
|
||||
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
|
||||
"""Set HVAC mode.
|
||||
|
||||
This thermostat only supports HEAT mode. The climate platform validates
|
||||
that only supported modes are passed, so this method is a no-op.
|
||||
"""
|
||||
|
||||
async def async_set_preset_mode(self, preset_mode: str) -> None:
|
||||
"""Set new preset mode."""
|
||||
try:
|
||||
|
||||
@@ -59,7 +59,9 @@ rules:
|
||||
exception-translations: done
|
||||
icon-translations: todo
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: This integration doesn't have any cases where raising an issue is needed.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: Single device integration, no stale device handling needed.
|
||||
|
||||
@@ -36,5 +36,28 @@
|
||||
"alarm_trigger": {
|
||||
"service": "mdi:bell-ring"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"armed": {
|
||||
"trigger": "mdi:shield"
|
||||
},
|
||||
"armed_away": {
|
||||
"trigger": "mdi:shield-lock"
|
||||
},
|
||||
"armed_home": {
|
||||
"trigger": "mdi:shield-home"
|
||||
},
|
||||
"armed_night": {
|
||||
"trigger": "mdi:shield-moon"
|
||||
},
|
||||
"armed_vacation": {
|
||||
"trigger": "mdi:shield-airplane"
|
||||
},
|
||||
"disarmed": {
|
||||
"trigger": "mdi:shield-off"
|
||||
},
|
||||
"triggered": {
|
||||
"trigger": "mdi:bell-ring"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description": "The behavior of the targeted alarms to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"arm_away": "Arm {entity_name} away",
|
||||
@@ -71,6 +75,15 @@
|
||||
"message": "Arming requires a code but none was given for {entity_id}."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"alarm_arm_away": {
|
||||
"description": "Arms the alarm in the away mode.",
|
||||
@@ -143,5 +156,84 @@
|
||||
"name": "Trigger"
|
||||
}
|
||||
},
|
||||
"title": "Alarm control panel"
|
||||
"title": "Alarm control panel",
|
||||
"triggers": {
|
||||
"armed": {
|
||||
"description": "Triggers when an alarm is armed.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is armed"
|
||||
},
|
||||
"armed_away": {
|
||||
"description": "Triggers when an alarm is armed away.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_away::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is armed away"
|
||||
},
|
||||
"armed_home": {
|
||||
"description": "Triggers when an alarm is armed home.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_home::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is armed home"
|
||||
},
|
||||
"armed_night": {
|
||||
"description": "Triggers when an alarm is armed night.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_night::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is armed night"
|
||||
},
|
||||
"armed_vacation": {
|
||||
"description": "Triggers when an alarm is armed vacation.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_vacation::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is armed vacation"
|
||||
},
|
||||
"disarmed": {
|
||||
"description": "Triggers when an alarm is disarmed.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::disarmed::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is disarmed"
|
||||
},
|
||||
"triggered": {
|
||||
"description": "Triggers when an alarm is triggered.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::triggered::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is triggered"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
99
homeassistant/components/alarm_control_panel/trigger.py
Normal file
99
homeassistant/components/alarm_control_panel/trigger.py
Normal file
@@ -0,0 +1,99 @@
|
||||
"""Provides triggers for alarm control panels."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity import get_supported_features
|
||||
from homeassistant.helpers.trigger import (
|
||||
EntityStateTriggerBase,
|
||||
Trigger,
|
||||
make_conditional_entity_state_trigger,
|
||||
make_entity_state_trigger,
|
||||
)
|
||||
|
||||
from .const import DOMAIN, AlarmControlPanelEntityFeature, AlarmControlPanelState
|
||||
|
||||
|
||||
def supports_feature(hass: HomeAssistant, entity_id: str, features: int) -> bool:
|
||||
"""Get the device class of an entity or UNDEFINED if not found."""
|
||||
try:
|
||||
return bool(get_supported_features(hass, entity_id) & features)
|
||||
except HomeAssistantError:
|
||||
return False
|
||||
|
||||
|
||||
class EntityStateTriggerRequiredFeatures(EntityStateTriggerBase):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_required_features: int
|
||||
|
||||
def entity_filter(self, entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
entities = super().entity_filter(entities)
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if supports_feature(self._hass, entity_id, self._required_features)
|
||||
}
|
||||
|
||||
|
||||
def make_entity_state_trigger_required_features(
|
||||
domain: str, to_state: str, required_features: int
|
||||
) -> type[EntityStateTriggerBase]:
|
||||
"""Create an entity state trigger class."""
|
||||
|
||||
class CustomTrigger(EntityStateTriggerRequiredFeatures):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_domain = domain
|
||||
_to_state = to_state
|
||||
_required_features = required_features
|
||||
|
||||
return CustomTrigger
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"armed": make_conditional_entity_state_trigger(
|
||||
DOMAIN,
|
||||
from_states={
|
||||
AlarmControlPanelState.ARMING,
|
||||
AlarmControlPanelState.DISARMED,
|
||||
AlarmControlPanelState.DISARMING,
|
||||
AlarmControlPanelState.PENDING,
|
||||
AlarmControlPanelState.TRIGGERED,
|
||||
},
|
||||
to_states={
|
||||
AlarmControlPanelState.ARMED_AWAY,
|
||||
AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
|
||||
AlarmControlPanelState.ARMED_HOME,
|
||||
AlarmControlPanelState.ARMED_NIGHT,
|
||||
AlarmControlPanelState.ARMED_VACATION,
|
||||
},
|
||||
),
|
||||
"armed_away": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_AWAY,
|
||||
AlarmControlPanelEntityFeature.ARM_AWAY,
|
||||
),
|
||||
"armed_home": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_HOME,
|
||||
AlarmControlPanelEntityFeature.ARM_HOME,
|
||||
),
|
||||
"armed_night": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_NIGHT,
|
||||
AlarmControlPanelEntityFeature.ARM_NIGHT,
|
||||
),
|
||||
"armed_vacation": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_VACATION,
|
||||
AlarmControlPanelEntityFeature.ARM_VACATION,
|
||||
),
|
||||
"disarmed": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.DISARMED),
|
||||
"triggered": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.TRIGGERED),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for alarm control panels."""
|
||||
return TRIGGERS
|
||||
53
homeassistant/components/alarm_control_panel/triggers.yaml
Normal file
53
homeassistant/components/alarm_control_panel/triggers.yaml
Normal file
@@ -0,0 +1,53 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
fields: &trigger_common_fields
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
translation_key: trigger_behavior
|
||||
|
||||
armed: *trigger_common
|
||||
|
||||
armed_away:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_AWAY
|
||||
|
||||
armed_home:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_HOME
|
||||
|
||||
armed_night:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_NIGHT
|
||||
|
||||
armed_vacation:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_VACATION
|
||||
|
||||
disarmed: *trigger_common
|
||||
|
||||
triggered: *trigger_common
|
||||
@@ -8,5 +8,5 @@
|
||||
"iot_class": "cloud_polling",
|
||||
"loggers": ["aioamazondevices"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": ["aioamazondevices==9.0.2"]
|
||||
"requirements": ["aioamazondevices==9.0.3"]
|
||||
}
|
||||
|
||||
70
homeassistant/components/anglian_water/__init__.py
Normal file
70
homeassistant/components/anglian_water/__init__.py
Normal file
@@ -0,0 +1,70 @@
|
||||
"""The Anglian Water integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pyanglianwater import AnglianWater
|
||||
from pyanglianwater.auth import MSOB2CAuth
|
||||
from pyanglianwater.exceptions import (
|
||||
ExpiredAccessTokenError,
|
||||
SelfAssertedError,
|
||||
SmartMeterUnavailableError,
|
||||
)
|
||||
|
||||
from homeassistant.const import (
|
||||
CONF_ACCESS_TOKEN,
|
||||
CONF_PASSWORD,
|
||||
CONF_USERNAME,
|
||||
Platform,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryError
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CONF_ACCOUNT_NUMBER, DOMAIN
|
||||
from .coordinator import AnglianWaterConfigEntry, AnglianWaterUpdateCoordinator
|
||||
|
||||
_PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: AnglianWaterConfigEntry
|
||||
) -> bool:
|
||||
"""Set up Anglian Water from a config entry."""
|
||||
auth = MSOB2CAuth(
|
||||
username=entry.data[CONF_USERNAME],
|
||||
password=entry.data[CONF_PASSWORD],
|
||||
session=async_get_clientsession(hass),
|
||||
refresh_token=entry.data[CONF_ACCESS_TOKEN],
|
||||
account_number=entry.data[CONF_ACCOUNT_NUMBER],
|
||||
)
|
||||
try:
|
||||
await auth.send_refresh_request()
|
||||
except (ExpiredAccessTokenError, SelfAssertedError) as err:
|
||||
raise ConfigEntryAuthFailed from err
|
||||
|
||||
_aw = AnglianWater(authenticator=auth)
|
||||
|
||||
try:
|
||||
await _aw.validate_smart_meter()
|
||||
except SmartMeterUnavailableError as err:
|
||||
raise ConfigEntryError(
|
||||
translation_domain=DOMAIN, translation_key="smart_meter_unavailable"
|
||||
) from err
|
||||
|
||||
hass.config_entries.async_update_entry(
|
||||
entry, data={**entry.data, CONF_ACCESS_TOKEN: auth.refresh_token}
|
||||
)
|
||||
entry.runtime_data = coordinator = AnglianWaterUpdateCoordinator(
|
||||
hass=hass, api=_aw, config_entry=entry
|
||||
)
|
||||
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
await hass.config_entries.async_forward_entry_setups(entry, _PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: AnglianWaterConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, _PLATFORMS)
|
||||
103
homeassistant/components/anglian_water/config_flow.py
Normal file
103
homeassistant/components/anglian_water/config_flow.py
Normal file
@@ -0,0 +1,103 @@
|
||||
"""Config flow for the Anglian Water integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import CookieJar
|
||||
from pyanglianwater import AnglianWater
|
||||
from pyanglianwater.auth import BaseAuth, MSOB2CAuth
|
||||
from pyanglianwater.exceptions import (
|
||||
InvalidAccountIdError,
|
||||
SelfAssertedError,
|
||||
SmartMeterUnavailableError,
|
||||
)
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_USERNAME
|
||||
from homeassistant.helpers import selector
|
||||
from homeassistant.helpers.aiohttp_client import async_create_clientsession
|
||||
|
||||
from .const import CONF_ACCOUNT_NUMBER, DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_USERNAME): selector.TextSelector(),
|
||||
vol.Required(CONF_PASSWORD): selector.TextSelector(
|
||||
selector.TextSelectorConfig(type=selector.TextSelectorType.PASSWORD)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def validate_credentials(auth: MSOB2CAuth) -> str | MSOB2CAuth:
|
||||
"""Validate the provided credentials."""
|
||||
try:
|
||||
await auth.send_login_request()
|
||||
except SelfAssertedError:
|
||||
return "invalid_auth"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
return "unknown"
|
||||
_aw = AnglianWater(authenticator=auth)
|
||||
try:
|
||||
await _aw.validate_smart_meter()
|
||||
except (InvalidAccountIdError, SmartMeterUnavailableError):
|
||||
return "smart_meter_unavailable"
|
||||
return auth
|
||||
|
||||
|
||||
class AnglianWaterConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Anglian Water."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
validation_response = await validate_credentials(
|
||||
MSOB2CAuth(
|
||||
username=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=async_create_clientsession(
|
||||
self.hass,
|
||||
cookie_jar=CookieJar(quote_cookie=False),
|
||||
),
|
||||
account_number=user_input.get(CONF_ACCOUNT_NUMBER),
|
||||
)
|
||||
)
|
||||
if isinstance(validation_response, BaseAuth):
|
||||
account_number = (
|
||||
user_input.get(CONF_ACCOUNT_NUMBER)
|
||||
or validation_response.account_number
|
||||
)
|
||||
await self.async_set_unique_id(account_number)
|
||||
self._abort_if_unique_id_configured()
|
||||
return self.async_create_entry(
|
||||
title=account_number,
|
||||
data={
|
||||
**user_input,
|
||||
CONF_ACCESS_TOKEN: validation_response.refresh_token,
|
||||
CONF_ACCOUNT_NUMBER: account_number,
|
||||
},
|
||||
)
|
||||
if validation_response == "smart_meter_unavailable":
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=STEP_USER_DATA_SCHEMA.extend(
|
||||
{
|
||||
vol.Required(CONF_ACCOUNT_NUMBER): selector.TextSelector(),
|
||||
}
|
||||
),
|
||||
errors={"base": validation_response},
|
||||
)
|
||||
errors["base"] = validation_response
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
|
||||
)
|
||||
4
homeassistant/components/anglian_water/const.py
Normal file
4
homeassistant/components/anglian_water/const.py
Normal file
@@ -0,0 +1,4 @@
|
||||
"""Constants for the Anglian Water integration."""
|
||||
|
||||
DOMAIN = "anglian_water"
|
||||
CONF_ACCOUNT_NUMBER = "account_number"
|
||||
49
homeassistant/components/anglian_water/coordinator.py
Normal file
49
homeassistant/components/anglian_water/coordinator.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""Anglian Water data coordinator."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
|
||||
from pyanglianwater import AnglianWater
|
||||
from pyanglianwater.exceptions import ExpiredAccessTokenError, UnknownEndpointError
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
type AnglianWaterConfigEntry = ConfigEntry[AnglianWaterUpdateCoordinator]
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
UPDATE_INTERVAL = timedelta(minutes=60)
|
||||
|
||||
|
||||
class AnglianWaterUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
"""Anglian Water data update coordinator."""
|
||||
|
||||
config_entry: AnglianWaterConfigEntry
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
api: AnglianWater,
|
||||
config_entry: AnglianWaterConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize update coordinator."""
|
||||
super().__init__(
|
||||
hass=hass,
|
||||
logger=_LOGGER,
|
||||
name=DOMAIN,
|
||||
update_interval=UPDATE_INTERVAL,
|
||||
config_entry=config_entry,
|
||||
)
|
||||
self.api = api
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Update data from Anglian Water's API."""
|
||||
try:
|
||||
return await self.api.update()
|
||||
except (ExpiredAccessTokenError, UnknownEndpointError) as err:
|
||||
raise UpdateFailed from err
|
||||
44
homeassistant/components/anglian_water/entity.py
Normal file
44
homeassistant/components/anglian_water/entity.py
Normal file
@@ -0,0 +1,44 @@
|
||||
"""Anglian Water entity."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
|
||||
from pyanglianwater.meter import SmartMeter
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import DOMAIN
|
||||
from .coordinator import AnglianWaterUpdateCoordinator
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AnglianWaterEntity(CoordinatorEntity[AnglianWaterUpdateCoordinator]):
|
||||
"""Defines a Anglian Water entity."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AnglianWaterUpdateCoordinator,
|
||||
smart_meter: SmartMeter,
|
||||
) -> None:
|
||||
"""Initialize Anglian Water entity."""
|
||||
super().__init__(coordinator)
|
||||
self.smart_meter = smart_meter
|
||||
self._attr_device_info = DeviceInfo(
|
||||
identifiers={(DOMAIN, smart_meter.serial_number)},
|
||||
name="Smart Water Meter",
|
||||
manufacturer="Anglian Water",
|
||||
serial_number=smart_meter.serial_number,
|
||||
)
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""When entity is loaded."""
|
||||
self.coordinator.api.updated_data_callbacks.append(self.async_write_ha_state)
|
||||
await super().async_added_to_hass()
|
||||
|
||||
async def async_will_remove_from_hass(self) -> None:
|
||||
"""When will be removed from HASS."""
|
||||
self.coordinator.api.updated_data_callbacks.remove(self.async_write_ha_state)
|
||||
await super().async_will_remove_from_hass()
|
||||
10
homeassistant/components/anglian_water/manifest.json
Normal file
10
homeassistant/components/anglian_water/manifest.json
Normal file
@@ -0,0 +1,10 @@
|
||||
{
|
||||
"domain": "anglian_water",
|
||||
"name": "Anglian Water",
|
||||
"codeowners": ["@pantherale0"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/anglian_water",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "bronze",
|
||||
"requirements": ["pyanglianwater==2.1.0"]
|
||||
}
|
||||
83
homeassistant/components/anglian_water/quality_scale.yaml
Normal file
83
homeassistant/components/anglian_water/quality_scale.yaml
Normal file
@@ -0,0 +1,83 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup: done
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom actions are defined.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: done
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow: todo
|
||||
test-coverage: todo
|
||||
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info:
|
||||
status: exempt
|
||||
comment: |
|
||||
Unable to discover meters.
|
||||
discovery:
|
||||
status: exempt
|
||||
comment: |
|
||||
Unable to discover meters.
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: done
|
||||
docs-supported-devices: done
|
||||
docs-supported-functions: done
|
||||
docs-troubleshooting: done
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default:
|
||||
status: exempt
|
||||
comment: |
|
||||
No entities are disabled by default.
|
||||
entity-translations: done
|
||||
exception-translations: done
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: |
|
||||
Entities do not require different icons.
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
Read-only integration and no repairs are possible.
|
||||
stale-devices: todo
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
118
homeassistant/components/anglian_water/sensor.py
Normal file
118
homeassistant/components/anglian_water/sensor.py
Normal file
@@ -0,0 +1,118 @@
|
||||
"""Anglian Water sensor platform."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
from enum import StrEnum
|
||||
|
||||
from pyanglianwater.meter import SmartMeter
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
EntityCategory,
|
||||
SensorDeviceClass,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import UnitOfVolume
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .coordinator import AnglianWaterConfigEntry, AnglianWaterUpdateCoordinator
|
||||
from .entity import AnglianWaterEntity
|
||||
|
||||
PARALLEL_UPDATES = 0
|
||||
|
||||
|
||||
class AnglianWaterSensor(StrEnum):
|
||||
"""Store keys for Anglian Water sensors."""
|
||||
|
||||
YESTERDAY_CONSUMPTION = "yesterday_consumption"
|
||||
YESTERDAY_WATER_COST = "yesterday_water_cost"
|
||||
YESTERDAY_SEWERAGE_COST = "yesterday_sewerage_cost"
|
||||
LATEST_READING = "latest_reading"
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class AnglianWaterSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describes AnglianWater sensor entity."""
|
||||
|
||||
value_fn: Callable[[SmartMeter], float]
|
||||
|
||||
|
||||
ENTITY_DESCRIPTIONS: tuple[AnglianWaterSensorEntityDescription, ...] = (
|
||||
AnglianWaterSensorEntityDescription(
|
||||
key=AnglianWaterSensor.YESTERDAY_CONSUMPTION,
|
||||
native_unit_of_measurement=UnitOfVolume.LITERS,
|
||||
device_class=SensorDeviceClass.WATER,
|
||||
value_fn=lambda entity: entity.get_yesterday_consumption,
|
||||
state_class=SensorStateClass.TOTAL,
|
||||
translation_key=AnglianWaterSensor.YESTERDAY_CONSUMPTION,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
AnglianWaterSensorEntityDescription(
|
||||
key=AnglianWaterSensor.LATEST_READING,
|
||||
native_unit_of_measurement=UnitOfVolume.CUBIC_METERS,
|
||||
device_class=SensorDeviceClass.WATER,
|
||||
value_fn=lambda entity: entity.latest_read,
|
||||
state_class=SensorStateClass.TOTAL_INCREASING,
|
||||
translation_key=AnglianWaterSensor.LATEST_READING,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
AnglianWaterSensorEntityDescription(
|
||||
key=AnglianWaterSensor.YESTERDAY_WATER_COST,
|
||||
native_unit_of_measurement="GBP",
|
||||
device_class=SensorDeviceClass.MONETARY,
|
||||
value_fn=lambda entity: entity.yesterday_water_cost,
|
||||
translation_key=AnglianWaterSensor.YESTERDAY_WATER_COST,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
AnglianWaterSensorEntityDescription(
|
||||
key=AnglianWaterSensor.YESTERDAY_SEWERAGE_COST,
|
||||
native_unit_of_measurement="GBP",
|
||||
device_class=SensorDeviceClass.MONETARY,
|
||||
value_fn=lambda entity: entity.yesterday_sewerage_cost,
|
||||
translation_key=AnglianWaterSensor.YESTERDAY_SEWERAGE_COST,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: AnglianWaterConfigEntry,
|
||||
async_add_devices: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the sensor platform."""
|
||||
async_add_devices(
|
||||
AnglianWaterSensorEntity(
|
||||
coordinator=entry.runtime_data,
|
||||
description=entity_description,
|
||||
smart_meter=smart_meter,
|
||||
)
|
||||
for entity_description in ENTITY_DESCRIPTIONS
|
||||
for smart_meter in entry.runtime_data.api.meters.values()
|
||||
)
|
||||
|
||||
|
||||
class AnglianWaterSensorEntity(AnglianWaterEntity, SensorEntity):
|
||||
"""Defines a Anglian Water sensor."""
|
||||
|
||||
entity_description: AnglianWaterSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: AnglianWaterUpdateCoordinator,
|
||||
smart_meter: SmartMeter,
|
||||
description: AnglianWaterSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize Anglian Water sensor."""
|
||||
super().__init__(coordinator, smart_meter)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{smart_meter.serial_number}_{description.key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the state of the sensor."""
|
||||
return self.entity_description.value_fn(self.smart_meter)
|
||||
55
homeassistant/components/anglian_water/strings.json
Normal file
55
homeassistant/components/anglian_water/strings.json
Normal file
@@ -0,0 +1,55 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
|
||||
"smart_meter_unavailable": "This account does not have any smart meters associated with it. If this is unexpected, enter your Billing Account Number found at the top of your latest bill.",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"account_number": "Billing Account Number",
|
||||
"password": "[%key:common::config_flow::data::password%]",
|
||||
"username": "[%key:common::config_flow::data::username%]"
|
||||
},
|
||||
"data_description": {
|
||||
"account_number": "Your account number found on your latest bill.",
|
||||
"password": "Your password",
|
||||
"username": "Username or email used to login to the Anglian Water website."
|
||||
},
|
||||
"description": "Enter your Anglian Water account credentials to connect to Home Assistant."
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"latest_reading": {
|
||||
"name": "Latest reading"
|
||||
},
|
||||
"yesterday_consumption": {
|
||||
"name": "Yesterday's usage"
|
||||
},
|
||||
"yesterday_sewerage_cost": {
|
||||
"name": "Yesterday's sewerage cost"
|
||||
},
|
||||
"yesterday_water_cost": {
|
||||
"name": "Yesterday's water cost"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"auth_expired": {
|
||||
"message": "Authentication token expired"
|
||||
},
|
||||
"service_unavailable": {
|
||||
"message": "Anglian Water services are currently unavailable for maintenance."
|
||||
},
|
||||
"smart_meter_unavailable": {
|
||||
"message": "This account no longer has a smart meter associated with it."
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -17,13 +17,7 @@ from homeassistant.helpers import (
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
)
|
||||
from .const import CONF_CHAT_MODEL, DEFAULT, DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
|
||||
|
||||
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
@@ -46,9 +40,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
|
||||
# Use model from first conversation subentry for validation
|
||||
subentries = list(entry.subentries.values())
|
||||
if subentries:
|
||||
model_id = subentries[0].data.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
model_id = subentries[0].data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
|
||||
else:
|
||||
model_id = RECOMMENDED_CHAT_MODEL
|
||||
model_id = DEFAULT[CONF_CHAT_MODEL]
|
||||
model = await client.models.retrieve(model_id=model_id, timeout=10.0)
|
||||
LOGGER.debug("Anthropic model: %s", model.display_name)
|
||||
except anthropic.AuthenticationError as err:
|
||||
|
||||
@@ -6,7 +6,7 @@ from functools import partial
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
import anthropic
|
||||
import voluptuous as vol
|
||||
@@ -54,17 +54,11 @@ from .const import (
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DEFAULT,
|
||||
DEFAULT_AI_TASK_NAME,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DOMAIN,
|
||||
NON_THINKING_MODELS,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_THINKING_BUDGET,
|
||||
RECOMMENDED_WEB_SEARCH,
|
||||
RECOMMENDED_WEB_SEARCH_MAX_USES,
|
||||
RECOMMENDED_WEB_SEARCH_USER_LOCATION,
|
||||
WEB_SEARCH_UNSUPPORTED_MODELS,
|
||||
)
|
||||
|
||||
@@ -76,13 +70,13 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
RECOMMENDED_CONVERSATION_OPTIONS = {
|
||||
DEFAULT_CONVERSATION_OPTIONS = {
|
||||
CONF_RECOMMENDED: True,
|
||||
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
|
||||
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
|
||||
}
|
||||
|
||||
RECOMMENDED_AI_TASK_OPTIONS = {
|
||||
DEFAULT_AI_TASK_OPTIONS = {
|
||||
CONF_RECOMMENDED: True,
|
||||
}
|
||||
|
||||
@@ -136,13 +130,13 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
subentries=[
|
||||
{
|
||||
"subentry_type": "conversation",
|
||||
"data": RECOMMENDED_CONVERSATION_OPTIONS,
|
||||
"data": DEFAULT_CONVERSATION_OPTIONS,
|
||||
"title": DEFAULT_CONVERSATION_NAME,
|
||||
"unique_id": None,
|
||||
},
|
||||
{
|
||||
"subentry_type": "ai_task_data",
|
||||
"data": RECOMMENDED_AI_TASK_OPTIONS,
|
||||
"data": DEFAULT_AI_TASK_OPTIONS,
|
||||
"title": DEFAULT_AI_TASK_NAME,
|
||||
"unique_id": None,
|
||||
},
|
||||
@@ -180,9 +174,9 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
) -> SubentryFlowResult:
|
||||
"""Add a subentry."""
|
||||
if self._subentry_type == "ai_task_data":
|
||||
self.options = RECOMMENDED_AI_TASK_OPTIONS.copy()
|
||||
self.options = DEFAULT_AI_TASK_OPTIONS.copy()
|
||||
else:
|
||||
self.options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
|
||||
self.options = DEFAULT_CONVERSATION_OPTIONS.copy()
|
||||
return await self.async_step_init()
|
||||
|
||||
async def async_step_reconfigure(
|
||||
@@ -283,7 +277,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
step_schema: VolDictType = {
|
||||
vol.Optional(
|
||||
CONF_CHAT_MODEL,
|
||||
default=RECOMMENDED_CHAT_MODEL,
|
||||
default=DEFAULT[CONF_CHAT_MODEL],
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=await self._get_model_list(), custom_value=True
|
||||
@@ -291,11 +285,11 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
),
|
||||
vol.Optional(
|
||||
CONF_MAX_TOKENS,
|
||||
default=RECOMMENDED_MAX_TOKENS,
|
||||
default=DEFAULT[CONF_MAX_TOKENS],
|
||||
): int,
|
||||
vol.Optional(
|
||||
CONF_TEMPERATURE,
|
||||
default=RECOMMENDED_TEMPERATURE,
|
||||
default=DEFAULT[CONF_TEMPERATURE],
|
||||
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
|
||||
}
|
||||
|
||||
@@ -325,12 +319,14 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
|
||||
if not model.startswith(tuple(NON_THINKING_MODELS)):
|
||||
step_schema[
|
||||
vol.Optional(CONF_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET)
|
||||
vol.Optional(
|
||||
CONF_THINKING_BUDGET, default=DEFAULT[CONF_THINKING_BUDGET]
|
||||
)
|
||||
] = vol.All(
|
||||
NumberSelector(
|
||||
NumberSelectorConfig(
|
||||
min=0,
|
||||
max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
max=self.options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
|
||||
)
|
||||
),
|
||||
vol.Coerce(int),
|
||||
@@ -343,15 +339,15 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH,
|
||||
default=RECOMMENDED_WEB_SEARCH,
|
||||
default=DEFAULT[CONF_WEB_SEARCH],
|
||||
): bool,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_MAX_USES,
|
||||
default=RECOMMENDED_WEB_SEARCH_MAX_USES,
|
||||
default=DEFAULT[CONF_WEB_SEARCH_MAX_USES],
|
||||
): int,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
|
||||
default=DEFAULT[CONF_WEB_SEARCH_USER_LOCATION],
|
||||
): bool,
|
||||
}
|
||||
)
|
||||
@@ -369,9 +365,10 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
user_input = {}
|
||||
|
||||
if user_input is not None:
|
||||
if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH) and not errors:
|
||||
if user_input.get(CONF_WEB_SEARCH, DEFAULT[CONF_WEB_SEARCH]) and not errors:
|
||||
if user_input.get(
|
||||
CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DEFAULT[CONF_WEB_SEARCH_USER_LOCATION],
|
||||
):
|
||||
user_input.update(await self._get_location_data())
|
||||
|
||||
@@ -456,7 +453,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
}
|
||||
)
|
||||
response = await client.messages.create(
|
||||
model=RECOMMENDED_CHAT_MODEL,
|
||||
model=cast(str, DEFAULT[CONF_CHAT_MODEL]),
|
||||
messages=[
|
||||
{
|
||||
"role": "user",
|
||||
@@ -471,7 +468,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
"content": "{", # hints the model to skip any preamble
|
||||
},
|
||||
],
|
||||
max_tokens=RECOMMENDED_MAX_TOKENS,
|
||||
max_tokens=cast(int, DEFAULT[CONF_MAX_TOKENS]),
|
||||
)
|
||||
_LOGGER.debug("Model response: %s", response.content)
|
||||
location_data = location_schema(
|
||||
|
||||
@@ -11,25 +11,29 @@ DEFAULT_AI_TASK_NAME = "Claude AI Task"
|
||||
CONF_RECOMMENDED = "recommended"
|
||||
CONF_PROMPT = "prompt"
|
||||
CONF_CHAT_MODEL = "chat_model"
|
||||
RECOMMENDED_CHAT_MODEL = "claude-3-5-haiku-latest"
|
||||
CONF_MAX_TOKENS = "max_tokens"
|
||||
RECOMMENDED_MAX_TOKENS = 3000
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
RECOMMENDED_TEMPERATURE = 1.0
|
||||
CONF_THINKING_BUDGET = "thinking_budget"
|
||||
RECOMMENDED_THINKING_BUDGET = 0
|
||||
MIN_THINKING_BUDGET = 1024
|
||||
CONF_WEB_SEARCH = "web_search"
|
||||
RECOMMENDED_WEB_SEARCH = False
|
||||
CONF_WEB_SEARCH_USER_LOCATION = "user_location"
|
||||
RECOMMENDED_WEB_SEARCH_USER_LOCATION = False
|
||||
CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
|
||||
RECOMMENDED_WEB_SEARCH_MAX_USES = 5
|
||||
CONF_WEB_SEARCH_CITY = "city"
|
||||
CONF_WEB_SEARCH_REGION = "region"
|
||||
CONF_WEB_SEARCH_COUNTRY = "country"
|
||||
CONF_WEB_SEARCH_TIMEZONE = "timezone"
|
||||
|
||||
DEFAULT = {
|
||||
CONF_CHAT_MODEL: "claude-3-5-haiku-latest",
|
||||
CONF_MAX_TOKENS: 3000,
|
||||
CONF_TEMPERATURE: 1.0,
|
||||
CONF_THINKING_BUDGET: 0,
|
||||
CONF_WEB_SEARCH: False,
|
||||
CONF_WEB_SEARCH_USER_LOCATION: False,
|
||||
CONF_WEB_SEARCH_MAX_USES: 5,
|
||||
}
|
||||
|
||||
MIN_THINKING_BUDGET = 1024
|
||||
|
||||
NON_THINKING_MODELS = [
|
||||
"claude-3-5", # Both sonnet and haiku
|
||||
"claude-3-opus",
|
||||
|
||||
@@ -84,14 +84,11 @@ from .const import (
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DEFAULT,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
MIN_THINKING_BUDGET,
|
||||
NON_THINKING_MODELS,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_THINKING_BUDGET,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
@@ -604,17 +601,19 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
raise TypeError("First message must be a system message")
|
||||
messages = _convert_content(chat_log.content[1:])
|
||||
|
||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
model = options.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
|
||||
|
||||
model_args = MessageCreateParamsStreaming(
|
||||
model=model,
|
||||
messages=messages,
|
||||
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
max_tokens=options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
|
||||
system=system.content,
|
||||
stream=True,
|
||||
)
|
||||
|
||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||
thinking_budget = options.get(
|
||||
CONF_THINKING_BUDGET, DEFAULT[CONF_THINKING_BUDGET]
|
||||
)
|
||||
if (
|
||||
not model.startswith(tuple(NON_THINKING_MODELS))
|
||||
and thinking_budget >= MIN_THINKING_BUDGET
|
||||
@@ -625,7 +624,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
else:
|
||||
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
|
||||
model_args["temperature"] = options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
CONF_TEMPERATURE, DEFAULT[CONF_TEMPERATURE]
|
||||
)
|
||||
|
||||
tools: list[ToolUnionParam] = []
|
||||
|
||||
@@ -14,5 +14,19 @@
|
||||
"start_conversation": {
|
||||
"service": "mdi:forum"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"idle": {
|
||||
"trigger": "mdi:chat-sleep"
|
||||
},
|
||||
"listening": {
|
||||
"trigger": "mdi:chat-question"
|
||||
},
|
||||
"processing": {
|
||||
"trigger": "mdi:chat-processing"
|
||||
},
|
||||
"responding": {
|
||||
"trigger": "mdi:chat-alert"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description": "The behavior of the targeted Assist satellites to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"name": "Assist satellite",
|
||||
@@ -16,6 +20,13 @@
|
||||
"id": "Answer ID",
|
||||
"sentences": "Sentences"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -98,5 +109,51 @@
|
||||
"name": "Start conversation"
|
||||
}
|
||||
},
|
||||
"title": "Assist satellite"
|
||||
"title": "Assist satellite",
|
||||
"triggers": {
|
||||
"idle": {
|
||||
"description": "Triggers when an Assist satellite becomes idle.",
|
||||
"description_configured": "[%key:component::assist_satellite::triggers::idle::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an Assist satellite becomes idle"
|
||||
},
|
||||
"listening": {
|
||||
"description": "Triggers when an Assist satellite starts listening.",
|
||||
"description_configured": "[%key:component::assist_satellite::triggers::listening::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an Assist satellite starts listening"
|
||||
},
|
||||
"processing": {
|
||||
"description": "Triggers when an Assist satellite is processing.",
|
||||
"description_configured": "[%key:component::assist_satellite::triggers::processing::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an Assist satellite is processing"
|
||||
},
|
||||
"responding": {
|
||||
"description": "Triggers when an Assist satellite is responding.",
|
||||
"description_configured": "[%key:component::assist_satellite::triggers::responding::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an Assist satellite is responding"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
19
homeassistant/components/assist_satellite/trigger.py
Normal file
19
homeassistant/components/assist_satellite/trigger.py
Normal file
@@ -0,0 +1,19 @@
|
||||
"""Provides triggers for assist satellites."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger
|
||||
|
||||
from .const import DOMAIN
|
||||
from .entity import AssistSatelliteState
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"idle": make_entity_state_trigger(DOMAIN, AssistSatelliteState.IDLE),
|
||||
"listening": make_entity_state_trigger(DOMAIN, AssistSatelliteState.LISTENING),
|
||||
"processing": make_entity_state_trigger(DOMAIN, AssistSatelliteState.PROCESSING),
|
||||
"responding": make_entity_state_trigger(DOMAIN, AssistSatelliteState.RESPONDING),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for assist satellites."""
|
||||
return TRIGGERS
|
||||
20
homeassistant/components/assist_satellite/triggers.yaml
Normal file
20
homeassistant/components/assist_satellite/triggers.yaml
Normal file
@@ -0,0 +1,20 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
entity:
|
||||
domain: assist_satellite
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
translation_key: trigger_behavior
|
||||
|
||||
idle: *trigger_common
|
||||
listening: *trigger_common
|
||||
processing: *trigger_common
|
||||
responding: *trigger_common
|
||||
@@ -29,5 +29,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/august",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["pubnub", "yalexs"],
|
||||
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.1.2"]
|
||||
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.1"]
|
||||
}
|
||||
|
||||
@@ -6,5 +6,10 @@
|
||||
"dependencies": ["blueprint", "trace"],
|
||||
"documentation": "https://www.home-assistant.io/integrations/automation",
|
||||
"integration_type": "system",
|
||||
"preview_features": {
|
||||
"new_triggers_conditions": {
|
||||
"report_issue_url": "https://github.com/home-assistant/core/issues/new?template=bug_report.yml&integration_link=https://www.home-assistant.io/integrations/automation&integration_name=Automation"
|
||||
}
|
||||
},
|
||||
"quality_scale": "internal"
|
||||
}
|
||||
|
||||
@@ -67,6 +67,14 @@
|
||||
"title": "[%key:component::automation::common::validation_failed_title%]"
|
||||
}
|
||||
},
|
||||
"preview_features": {
|
||||
"new_triggers_conditions": {
|
||||
"description": "Enables new intuitive triggers and conditions that are more user-friendly than technical state-based options.\n\nThese new automation features support targets across your entire home, letting you trigger automations for any entity, device, area, floor, or label (for example, when any light in your living room turned on). Integrations can now also provide their own intuitive triggers and conditions, just like actions.\n\nThis preview also includes a new tree view to help you navigate your home when adding triggers, conditions, and actions.",
|
||||
"disable_confirmation": "Disabling this preview will cause automations and scripts that use the new intuitive triggers and conditions to fail.\n\nBefore disabling, ensure that your automations or scripts do not rely on this feature.",
|
||||
"enable_confirmation": "This feature is still in development and may change. These new intuitive triggers and conditions are being refined based on user feedback and are not yet complete.\n\nBy enabling this preview, you'll have early access to these new capabilities, but be aware that they may be modified or updated in future releases.",
|
||||
"name": "Intuitive triggers and conditions"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"reload": {
|
||||
"description": "Reloads the automation configuration.",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"domain": "awair",
|
||||
"name": "Awair",
|
||||
"codeowners": ["@ahayworth", "@danielsjf"],
|
||||
"codeowners": ["@ahayworth", "@ricohageman"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/awair",
|
||||
"iot_class": "local_polling",
|
||||
|
||||
98
homeassistant/components/awair/quality_scale.yaml
Normal file
98
homeassistant/components/awair/quality_scale.yaml
Normal file
@@ -0,0 +1,98 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: No actions defined
|
||||
appropriate-polling:
|
||||
status: done
|
||||
comment: |
|
||||
We fetch both user and devices, could probably slow one down
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow:
|
||||
status: todo
|
||||
comment: |
|
||||
data_description fields are missing
|
||||
Should not abort in cloud step when anything else than invalid access token
|
||||
Find out why access token is optional
|
||||
Discovered devices step is redundant
|
||||
config-flow-test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
Move happy flow to the top and merge with `test_show_form`
|
||||
Reuse `result`
|
||||
Cloud tests should initialize with data directly
|
||||
Tests should finish in CREATE_ENTRY
|
||||
dependency-transparency:
|
||||
status: todo
|
||||
comment: |
|
||||
Dependency is not built in the CI
|
||||
docs-actions: todo
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: No explicit event subscription
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: No actions defined
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: todo
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: done
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
Patch objects where we use them
|
||||
Use test helpers to load JSON
|
||||
typo `no_devicess_fixture`
|
||||
Make common config entries for cloud and local
|
||||
Test setup of the integration
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
status: done
|
||||
comment: |
|
||||
Can move to shorthand attribute
|
||||
Can remove typecast
|
||||
diagnostics: todo
|
||||
discovery: done
|
||||
discovery-update-info: done
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: todo
|
||||
entity-device-class:
|
||||
status: done
|
||||
comment: |
|
||||
Can remove rounding
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: todo
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
@@ -1 +0,0 @@
|
||||
"""The bluetooth_tracker component."""
|
||||
@@ -1,10 +0,0 @@
|
||||
"""Constants for the Bluetooth Tracker component."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN: Final = "bluetooth_tracker"
|
||||
SERVICE_UPDATE: Final = "update"
|
||||
|
||||
BT_PREFIX: Final = "BT_"
|
||||
CONF_REQUEST_RSSI: Final = "request_rssi"
|
||||
DEFAULT_DEVICE_ID: Final = -1
|
||||
@@ -1,213 +0,0 @@
|
||||
"""Tracking for bluetooth devices."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
import bluetooth
|
||||
from bt_proximity import BluetoothRSSI
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.components.device_tracker import (
|
||||
CONF_SCAN_INTERVAL,
|
||||
CONF_TRACK_NEW,
|
||||
DEFAULT_TRACK_NEW,
|
||||
PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA,
|
||||
SCAN_INTERVAL,
|
||||
SourceType,
|
||||
)
|
||||
from homeassistant.components.device_tracker.legacy import (
|
||||
YAML_DEVICES,
|
||||
AsyncSeeCallback,
|
||||
Device,
|
||||
async_load_config,
|
||||
)
|
||||
from homeassistant.const import CONF_DEVICE_ID
|
||||
from homeassistant.core import HomeAssistant, ServiceCall
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.event import async_track_time_interval
|
||||
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
|
||||
|
||||
from .const import (
|
||||
BT_PREFIX,
|
||||
CONF_REQUEST_RSSI,
|
||||
DEFAULT_DEVICE_ID,
|
||||
DOMAIN,
|
||||
SERVICE_UPDATE,
|
||||
)
|
||||
|
||||
_LOGGER: Final = logging.getLogger(__name__)
|
||||
|
||||
PLATFORM_SCHEMA: Final = DEVICE_TRACKER_PLATFORM_SCHEMA.extend(
|
||||
{
|
||||
vol.Optional(CONF_TRACK_NEW): cv.boolean,
|
||||
vol.Optional(CONF_REQUEST_RSSI): cv.boolean,
|
||||
vol.Optional(CONF_DEVICE_ID, default=DEFAULT_DEVICE_ID): vol.All(
|
||||
vol.Coerce(int), vol.Range(min=-1)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def is_bluetooth_device(device: Device) -> bool:
|
||||
"""Check whether a device is a bluetooth device by its mac."""
|
||||
return device.mac is not None and device.mac[:3].upper() == BT_PREFIX
|
||||
|
||||
|
||||
def discover_devices(device_id: int) -> list[tuple[str, str]]:
|
||||
"""Discover Bluetooth devices."""
|
||||
try:
|
||||
result = bluetooth.discover_devices(
|
||||
duration=8,
|
||||
lookup_names=True,
|
||||
flush_cache=True,
|
||||
lookup_class=False,
|
||||
device_id=device_id,
|
||||
)
|
||||
except OSError as ex:
|
||||
# OSError is generally thrown if a bluetooth device isn't found
|
||||
_LOGGER.error("Couldn't discover bluetooth devices: %s", ex)
|
||||
return []
|
||||
_LOGGER.debug("Bluetooth devices discovered = %d", len(result))
|
||||
return result # type: ignore[no-any-return]
|
||||
|
||||
|
||||
async def see_device(
|
||||
hass: HomeAssistant,
|
||||
async_see: AsyncSeeCallback,
|
||||
mac: str,
|
||||
device_name: str,
|
||||
rssi: tuple[int] | None = None,
|
||||
) -> None:
|
||||
"""Mark a device as seen."""
|
||||
attributes = {}
|
||||
if rssi is not None:
|
||||
attributes["rssi"] = rssi
|
||||
|
||||
await async_see(
|
||||
mac=f"{BT_PREFIX}{mac}",
|
||||
host_name=device_name,
|
||||
attributes=attributes,
|
||||
source_type=SourceType.BLUETOOTH,
|
||||
)
|
||||
|
||||
|
||||
async def get_tracking_devices(hass: HomeAssistant) -> tuple[set[str], set[str]]:
|
||||
"""Load all known devices.
|
||||
|
||||
We just need the devices so set consider_home and home range to 0
|
||||
"""
|
||||
yaml_path: str = hass.config.path(YAML_DEVICES)
|
||||
|
||||
devices = await async_load_config(yaml_path, hass, timedelta(0))
|
||||
bluetooth_devices = [device for device in devices if is_bluetooth_device(device)]
|
||||
|
||||
devices_to_track: set[str] = {
|
||||
device.mac[3:]
|
||||
for device in bluetooth_devices
|
||||
if device.track and device.mac is not None
|
||||
}
|
||||
devices_to_not_track: set[str] = {
|
||||
device.mac[3:]
|
||||
for device in bluetooth_devices
|
||||
if not device.track and device.mac is not None
|
||||
}
|
||||
|
||||
return devices_to_track, devices_to_not_track
|
||||
|
||||
|
||||
def lookup_name(mac: str) -> str | None:
|
||||
"""Lookup a Bluetooth device name."""
|
||||
_LOGGER.debug("Scanning %s", mac)
|
||||
return bluetooth.lookup_name(mac, timeout=5) # type: ignore[no-any-return]
|
||||
|
||||
|
||||
async def async_setup_scanner(
|
||||
hass: HomeAssistant,
|
||||
config: ConfigType,
|
||||
async_see: AsyncSeeCallback,
|
||||
discovery_info: DiscoveryInfoType | None = None,
|
||||
) -> bool:
|
||||
"""Set up the Bluetooth Scanner."""
|
||||
device_id: int = config[CONF_DEVICE_ID]
|
||||
interval: timedelta = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
|
||||
request_rssi: bool = config.get(CONF_REQUEST_RSSI, False)
|
||||
update_bluetooth_lock = asyncio.Lock()
|
||||
|
||||
# If track new devices is true discover new devices on startup.
|
||||
track_new: bool = config.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)
|
||||
_LOGGER.debug("Tracking new devices is set to %s", track_new)
|
||||
|
||||
devices_to_track, devices_to_not_track = await get_tracking_devices(hass)
|
||||
|
||||
if not devices_to_track and not track_new:
|
||||
_LOGGER.debug("No Bluetooth devices to track and not tracking new devices")
|
||||
|
||||
if request_rssi:
|
||||
_LOGGER.debug("Detecting RSSI for devices")
|
||||
|
||||
async def perform_bluetooth_update() -> None:
|
||||
"""Discover Bluetooth devices and update status."""
|
||||
_LOGGER.debug("Performing Bluetooth devices discovery and update")
|
||||
tasks: list[asyncio.Task[None]] = []
|
||||
|
||||
try:
|
||||
if track_new:
|
||||
devices = await hass.async_add_executor_job(discover_devices, device_id)
|
||||
for mac, _device_name in devices:
|
||||
if mac not in devices_to_track and mac not in devices_to_not_track:
|
||||
devices_to_track.add(mac)
|
||||
|
||||
for mac in devices_to_track:
|
||||
friendly_name = await hass.async_add_executor_job(lookup_name, mac)
|
||||
if friendly_name is None:
|
||||
# Could not lookup device name
|
||||
continue
|
||||
|
||||
rssi = None
|
||||
if request_rssi:
|
||||
client = BluetoothRSSI(mac)
|
||||
rssi = await hass.async_add_executor_job(client.request_rssi)
|
||||
client.close()
|
||||
|
||||
tasks.append(
|
||||
asyncio.create_task(
|
||||
see_device(hass, async_see, mac, friendly_name, rssi)
|
||||
)
|
||||
)
|
||||
|
||||
if tasks:
|
||||
await asyncio.wait(tasks)
|
||||
|
||||
except bluetooth.BluetoothError:
|
||||
_LOGGER.exception("Error looking up Bluetooth device")
|
||||
|
||||
async def update_bluetooth(now: datetime | None = None) -> None:
|
||||
"""Lookup Bluetooth devices and update status."""
|
||||
# If an update is in progress, we don't do anything
|
||||
if update_bluetooth_lock.locked():
|
||||
_LOGGER.debug(
|
||||
(
|
||||
"Previous execution of update_bluetooth is taking longer than the"
|
||||
" scheduled update of interval %s"
|
||||
),
|
||||
interval,
|
||||
)
|
||||
return
|
||||
|
||||
async with update_bluetooth_lock:
|
||||
await perform_bluetooth_update()
|
||||
|
||||
async def handle_manual_update_bluetooth(call: ServiceCall) -> None:
|
||||
"""Update bluetooth devices on demand."""
|
||||
await update_bluetooth()
|
||||
|
||||
hass.async_create_task(update_bluetooth())
|
||||
async_track_time_interval(hass, update_bluetooth, interval)
|
||||
|
||||
hass.services.async_register(DOMAIN, SERVICE_UPDATE, handle_manual_update_bluetooth)
|
||||
|
||||
return True
|
||||
@@ -1,7 +0,0 @@
|
||||
{
|
||||
"services": {
|
||||
"update": {
|
||||
"service": "mdi:update"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,10 +0,0 @@
|
||||
{
|
||||
"domain": "bluetooth_tracker",
|
||||
"name": "Bluetooth Tracker",
|
||||
"codeowners": [],
|
||||
"documentation": "https://www.home-assistant.io/integrations/bluetooth_tracker",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["bluetooth", "bt_proximity"],
|
||||
"quality_scale": "legacy",
|
||||
"requirements": ["bt-proximity==0.2.1", "PyBluez==0.22"]
|
||||
}
|
||||
@@ -1 +0,0 @@
|
||||
update:
|
||||
@@ -1,8 +0,0 @@
|
||||
{
|
||||
"services": {
|
||||
"update": {
|
||||
"description": "Triggers manual tracker update.",
|
||||
"name": "Update"
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -96,5 +96,16 @@
|
||||
"turn_on": {
|
||||
"service": "mdi:power-on"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"started_heating": {
|
||||
"trigger": "mdi:fire"
|
||||
},
|
||||
"turned_off": {
|
||||
"trigger": "mdi:power-off"
|
||||
},
|
||||
"turned_on": {
|
||||
"trigger": "mdi:power-on"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description": "The behavior of the targeted climates to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"set_hvac_mode": "Change HVAC mode on {entity_name}",
|
||||
@@ -187,6 +191,13 @@
|
||||
"heat_cool": "Heat/cool",
|
||||
"off": "[%key:common::state::off%]"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -285,5 +296,40 @@
|
||||
"name": "[%key:common::action::turn_on%]"
|
||||
}
|
||||
},
|
||||
"title": "Climate"
|
||||
"title": "Climate",
|
||||
"triggers": {
|
||||
"started_heating": {
|
||||
"description": "Triggers when a climate starts to heat.",
|
||||
"description_configured": "[%key:component::climate::triggers::started_heating::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a climate starts to heat"
|
||||
},
|
||||
"turned_off": {
|
||||
"description": "Triggers when a climate is turned off.",
|
||||
"description_configured": "[%key:component::climate::triggers::turned_off::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a climate is turned off"
|
||||
},
|
||||
"turned_on": {
|
||||
"description": "Triggers when a climate is turned on.",
|
||||
"description_configured": "[%key:component::climate::triggers::turned_on::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::climate::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::climate::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a climate is turned on"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
37
homeassistant/components/climate/trigger.py
Normal file
37
homeassistant/components/climate/trigger.py
Normal file
@@ -0,0 +1,37 @@
|
||||
"""Provides triggers for climates."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.trigger import (
|
||||
Trigger,
|
||||
make_conditional_entity_state_trigger,
|
||||
make_entity_state_attribute_trigger,
|
||||
make_entity_state_trigger,
|
||||
)
|
||||
|
||||
from .const import ATTR_HVAC_ACTION, DOMAIN, HVACAction, HVACMode
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"turned_off": make_entity_state_trigger(DOMAIN, HVACMode.OFF),
|
||||
"turned_on": make_conditional_entity_state_trigger(
|
||||
DOMAIN,
|
||||
from_states={
|
||||
HVACMode.OFF,
|
||||
},
|
||||
to_states={
|
||||
HVACMode.AUTO,
|
||||
HVACMode.COOL,
|
||||
HVACMode.DRY,
|
||||
HVACMode.FAN_ONLY,
|
||||
HVACMode.HEAT,
|
||||
HVACMode.HEAT_COOL,
|
||||
},
|
||||
),
|
||||
"started_heating": make_entity_state_attribute_trigger(
|
||||
DOMAIN, ATTR_HVAC_ACTION, HVACAction.HEATING
|
||||
),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for climates."""
|
||||
return TRIGGERS
|
||||
19
homeassistant/components/climate/triggers.yaml
Normal file
19
homeassistant/components/climate/triggers.yaml
Normal file
@@ -0,0 +1,19 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
entity:
|
||||
domain: climate
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
|
||||
started_heating: *trigger_common
|
||||
turned_off: *trigger_common
|
||||
turned_on: *trigger_common
|
||||
@@ -77,7 +77,13 @@ from .subscription import async_subscription_info
|
||||
|
||||
DEFAULT_MODE = MODE_PROD
|
||||
|
||||
PLATFORMS = [Platform.BINARY_SENSOR, Platform.STT, Platform.TTS]
|
||||
PLATFORMS = [
|
||||
Platform.AI_TASK,
|
||||
Platform.BINARY_SENSOR,
|
||||
Platform.CONVERSATION,
|
||||
Platform.STT,
|
||||
Platform.TTS,
|
||||
]
|
||||
|
||||
SERVICE_REMOTE_CONNECT = "remote_connect"
|
||||
SERVICE_REMOTE_DISCONNECT = "remote_disconnect"
|
||||
|
||||
200
homeassistant/components/cloud/ai_task.py
Normal file
200
homeassistant/components/cloud/ai_task.py
Normal file
@@ -0,0 +1,200 @@
|
||||
"""AI Task integration for Home Assistant Cloud."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import io
|
||||
from json import JSONDecodeError
|
||||
import logging
|
||||
|
||||
from hass_nabucasa.llm import (
|
||||
LLMAuthenticationError,
|
||||
LLMError,
|
||||
LLMImageAttachment,
|
||||
LLMRateLimitError,
|
||||
LLMResponseError,
|
||||
LLMServiceError,
|
||||
)
|
||||
from PIL import Image
|
||||
|
||||
from homeassistant.components import ai_task, conversation
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util.json import json_loads
|
||||
|
||||
from .const import AI_TASK_ENTITY_UNIQUE_ID, DATA_CLOUD
|
||||
from .entity import BaseCloudLLMEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def _convert_image_for_editing(data: bytes) -> tuple[bytes, str]:
|
||||
"""Ensure the image data is in a format accepted by OpenAI image edits."""
|
||||
stream = io.BytesIO(data)
|
||||
with Image.open(stream) as img:
|
||||
mode = img.mode
|
||||
if mode not in ("RGBA", "LA", "L"):
|
||||
img = img.convert("RGBA")
|
||||
|
||||
output = io.BytesIO()
|
||||
if img.mode in ("RGBA", "LA", "L"):
|
||||
img.save(output, format="PNG")
|
||||
return output.getvalue(), "image/png"
|
||||
|
||||
img.save(output, format=img.format or "PNG")
|
||||
return output.getvalue(), f"image/{(img.format or 'png').lower()}"
|
||||
|
||||
|
||||
async def async_prepare_image_generation_attachments(
|
||||
hass: HomeAssistant, attachments: list[conversation.Attachment]
|
||||
) -> list[LLMImageAttachment]:
|
||||
"""Load attachment data for image generation."""
|
||||
|
||||
def prepare() -> list[LLMImageAttachment]:
|
||||
items: list[LLMImageAttachment] = []
|
||||
for attachment in attachments:
|
||||
if not attachment.mime_type or not attachment.mime_type.startswith(
|
||||
"image/"
|
||||
):
|
||||
raise HomeAssistantError(
|
||||
"Only image attachments are supported for image generation"
|
||||
)
|
||||
path = attachment.path
|
||||
if not path.exists():
|
||||
raise HomeAssistantError(f"`{path}` does not exist")
|
||||
|
||||
data = path.read_bytes()
|
||||
mime_type = attachment.mime_type
|
||||
|
||||
try:
|
||||
data, mime_type = _convert_image_for_editing(data)
|
||||
except HomeAssistantError:
|
||||
raise
|
||||
except Exception as err:
|
||||
raise HomeAssistantError("Failed to process image attachment") from err
|
||||
|
||||
items.append(
|
||||
LLMImageAttachment(
|
||||
filename=path.name,
|
||||
mime_type=mime_type,
|
||||
data=data,
|
||||
)
|
||||
)
|
||||
|
||||
return items
|
||||
|
||||
return await hass.async_add_executor_job(prepare)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Home Assistant Cloud AI Task entity."""
|
||||
cloud = hass.data[DATA_CLOUD]
|
||||
try:
|
||||
await cloud.llm.async_ensure_token()
|
||||
except LLMError:
|
||||
return
|
||||
|
||||
async_add_entities([CloudLLMTaskEntity(cloud, config_entry)])
|
||||
|
||||
|
||||
class CloudLLMTaskEntity(ai_task.AITaskEntity, BaseCloudLLMEntity):
|
||||
"""Home Assistant Cloud AI Task entity."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_supported_features = (
|
||||
ai_task.AITaskEntityFeature.GENERATE_DATA
|
||||
| ai_task.AITaskEntityFeature.GENERATE_IMAGE
|
||||
| ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
|
||||
)
|
||||
_attr_translation_key = "cloud_ai"
|
||||
_attr_unique_id = AI_TASK_ENTITY_UNIQUE_ID
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if the entity is available."""
|
||||
return self._cloud.is_logged_in and self._cloud.valid_subscription
|
||||
|
||||
async def _async_generate_data(
|
||||
self,
|
||||
task: ai_task.GenDataTask,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> ai_task.GenDataTaskResult:
|
||||
"""Handle a generate data task."""
|
||||
await self._async_handle_chat_log(
|
||||
"ai_task", chat_log, task.name, task.structure
|
||||
)
|
||||
|
||||
if not isinstance(chat_log.content[-1], conversation.AssistantContent):
|
||||
raise HomeAssistantError(
|
||||
"Last content in chat log is not an AssistantContent"
|
||||
)
|
||||
|
||||
text = chat_log.content[-1].content or ""
|
||||
|
||||
if not task.structure:
|
||||
return ai_task.GenDataTaskResult(
|
||||
conversation_id=chat_log.conversation_id,
|
||||
data=text,
|
||||
)
|
||||
try:
|
||||
data = json_loads(text)
|
||||
except JSONDecodeError as err:
|
||||
_LOGGER.error(
|
||||
"Failed to parse JSON response: %s. Response: %s",
|
||||
err,
|
||||
text,
|
||||
)
|
||||
raise HomeAssistantError("Error with OpenAI structured response") from err
|
||||
|
||||
return ai_task.GenDataTaskResult(
|
||||
conversation_id=chat_log.conversation_id,
|
||||
data=data,
|
||||
)
|
||||
|
||||
async def _async_generate_image(
|
||||
self,
|
||||
task: ai_task.GenImageTask,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> ai_task.GenImageTaskResult:
|
||||
"""Handle a generate image task."""
|
||||
attachments: list[LLMImageAttachment] | None = None
|
||||
if task.attachments:
|
||||
attachments = await async_prepare_image_generation_attachments(
|
||||
self.hass, task.attachments
|
||||
)
|
||||
|
||||
try:
|
||||
if attachments is None:
|
||||
image = await self._cloud.llm.async_generate_image(
|
||||
prompt=task.instructions,
|
||||
)
|
||||
else:
|
||||
image = await self._cloud.llm.async_edit_image(
|
||||
prompt=task.instructions,
|
||||
attachments=attachments,
|
||||
)
|
||||
except LLMAuthenticationError as err:
|
||||
raise ConfigEntryAuthFailed("Cloud LLM authentication failed") from err
|
||||
except LLMRateLimitError as err:
|
||||
raise HomeAssistantError("Cloud LLM is rate limited") from err
|
||||
except LLMResponseError as err:
|
||||
raise HomeAssistantError(str(err)) from err
|
||||
except LLMServiceError as err:
|
||||
raise HomeAssistantError("Error talking to Cloud LLM") from err
|
||||
except LLMError as err:
|
||||
raise HomeAssistantError(str(err)) from err
|
||||
|
||||
return ai_task.GenImageTaskResult(
|
||||
conversation_id=chat_log.conversation_id,
|
||||
mime_type=image["mime_type"],
|
||||
image_data=image["image_data"],
|
||||
model=image.get("model"),
|
||||
width=image.get("width"),
|
||||
height=image.get("height"),
|
||||
revised_prompt=image.get("revised_prompt"),
|
||||
)
|
||||
@@ -91,6 +91,8 @@ DISPATCHER_REMOTE_UPDATE: SignalType[Any] = SignalType("cloud_remote_update")
|
||||
|
||||
STT_ENTITY_UNIQUE_ID = "cloud-speech-to-text"
|
||||
TTS_ENTITY_UNIQUE_ID = "cloud-text-to-speech"
|
||||
AI_TASK_ENTITY_UNIQUE_ID = "cloud-ai-task"
|
||||
CONVERSATION_ENTITY_UNIQUE_ID = "cloud-conversation-agent"
|
||||
|
||||
LOGIN_MFA_TIMEOUT = 60
|
||||
|
||||
|
||||
75
homeassistant/components/cloud/conversation.py
Normal file
75
homeassistant/components/cloud/conversation.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""Conversation support for Home Assistant Cloud."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Literal
|
||||
|
||||
from hass_nabucasa.llm import LLMError
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
|
||||
from .const import CONVERSATION_ENTITY_UNIQUE_ID, DATA_CLOUD, DOMAIN
|
||||
from .entity import BaseCloudLLMEntity
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
config_entry: ConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up the Home Assistant Cloud conversation entity."""
|
||||
cloud = hass.data[DATA_CLOUD]
|
||||
try:
|
||||
await cloud.llm.async_ensure_token()
|
||||
except LLMError:
|
||||
return
|
||||
|
||||
async_add_entities([CloudConversationEntity(cloud, config_entry)])
|
||||
|
||||
|
||||
class CloudConversationEntity(
|
||||
conversation.ConversationEntity,
|
||||
BaseCloudLLMEntity,
|
||||
):
|
||||
"""Home Assistant Cloud conversation agent."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_name = "Home Assistant Cloud"
|
||||
_attr_translation_key = "cloud_conversation"
|
||||
_attr_unique_id = CONVERSATION_ENTITY_UNIQUE_ID
|
||||
_attr_supported_features = conversation.ConversationEntityFeature.CONTROL
|
||||
|
||||
@property
|
||||
def available(self) -> bool:
|
||||
"""Return if the entity is available."""
|
||||
return self._cloud.is_logged_in and self._cloud.valid_subscription
|
||||
|
||||
@property
|
||||
def supported_languages(self) -> list[str] | Literal["*"]:
|
||||
"""Return a list of supported languages."""
|
||||
return MATCH_ALL
|
||||
|
||||
async def _async_handle_message(
|
||||
self,
|
||||
user_input: conversation.ConversationInput,
|
||||
chat_log: conversation.ChatLog,
|
||||
) -> conversation.ConversationResult:
|
||||
"""Process a user input."""
|
||||
try:
|
||||
await chat_log.async_provide_llm_data(
|
||||
user_input.as_llm_context(DOMAIN),
|
||||
llm.LLM_API_ASSIST,
|
||||
None,
|
||||
user_input.extra_system_prompt,
|
||||
)
|
||||
except conversation.ConverseError as err:
|
||||
return err.as_conversation_result()
|
||||
|
||||
await self._async_handle_chat_log("conversation", chat_log)
|
||||
|
||||
return conversation.async_get_result_from_chat_log(user_input, chat_log)
|
||||
615
homeassistant/components/cloud/entity.py
Normal file
615
homeassistant/components/cloud/entity.py
Normal file
@@ -0,0 +1,615 @@
|
||||
"""Helpers for cloud LLM chat handling."""
|
||||
|
||||
import base64
|
||||
from collections.abc import AsyncGenerator, Callable, Iterable
|
||||
from enum import Enum
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Any, Literal, cast
|
||||
|
||||
from hass_nabucasa import Cloud
|
||||
from hass_nabucasa.llm import (
|
||||
LLMAuthenticationError,
|
||||
LLMError,
|
||||
LLMRateLimitError,
|
||||
LLMResponseError,
|
||||
LLMServiceError,
|
||||
)
|
||||
from litellm import (
|
||||
ResponseFunctionToolCall,
|
||||
ResponseInputParam,
|
||||
ResponsesAPIStreamEvents,
|
||||
)
|
||||
from openai.types.responses import (
|
||||
FunctionToolParam,
|
||||
ResponseInputItemParam,
|
||||
ResponseReasoningItem,
|
||||
ToolParam,
|
||||
WebSearchToolParam,
|
||||
)
|
||||
from openai.types.responses.response_input_param import (
|
||||
ImageGenerationCall as ImageGenerationCallParam,
|
||||
)
|
||||
from openai.types.responses.response_output_item import ImageGenerationCall
|
||||
import voluptuous as vol
|
||||
from voluptuous_openapi import convert
|
||||
|
||||
from homeassistant.components import conversation
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
|
||||
from homeassistant.helpers import llm
|
||||
from homeassistant.helpers.entity import Entity
|
||||
from homeassistant.util import slugify
|
||||
|
||||
from .client import CloudClient
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
_MAX_TOOL_ITERATIONS = 10
|
||||
|
||||
|
||||
class ResponseItemType(str, Enum):
|
||||
"""Response item types."""
|
||||
|
||||
FUNCTION_CALL = "function_call"
|
||||
MESSAGE = "message"
|
||||
REASONING = "reasoning"
|
||||
WEB_SEARCH_CALL = "web_search_call"
|
||||
IMAGE = "image"
|
||||
|
||||
|
||||
def _convert_content_to_param(
|
||||
chat_content: Iterable[conversation.Content],
|
||||
) -> ResponseInputParam:
|
||||
"""Convert any native chat message for this agent to the native format."""
|
||||
messages: ResponseInputParam = []
|
||||
reasoning_summary: list[str] = []
|
||||
web_search_calls: dict[str, dict[str, Any]] = {}
|
||||
|
||||
for content in chat_content:
|
||||
if isinstance(content, conversation.ToolResultContent):
|
||||
if (
|
||||
content.tool_name == "web_search_call"
|
||||
and content.tool_call_id in web_search_calls
|
||||
):
|
||||
web_search_call = web_search_calls.pop(content.tool_call_id)
|
||||
web_search_call["status"] = content.tool_result.get(
|
||||
"status", "completed"
|
||||
)
|
||||
messages.append(cast("ResponseInputItemParam", web_search_call))
|
||||
else:
|
||||
messages.append(
|
||||
{
|
||||
"type": "function_call_output",
|
||||
"call_id": content.tool_call_id,
|
||||
"output": json.dumps(content.tool_result),
|
||||
}
|
||||
)
|
||||
continue
|
||||
|
||||
if content.content:
|
||||
role: Literal["user", "assistant", "system", "developer"] = content.role
|
||||
if role == "system":
|
||||
role = "developer"
|
||||
messages.append(
|
||||
{"type": "message", "role": role, "content": content.content}
|
||||
)
|
||||
|
||||
if isinstance(content, conversation.AssistantContent):
|
||||
if content.tool_calls:
|
||||
for tool_call in content.tool_calls:
|
||||
if (
|
||||
tool_call.external
|
||||
and tool_call.tool_name == "web_search_call"
|
||||
and "action" in tool_call.tool_args
|
||||
):
|
||||
web_search_calls[tool_call.id] = {
|
||||
"type": "web_search_call",
|
||||
"id": tool_call.id,
|
||||
"action": tool_call.tool_args["action"],
|
||||
"status": "completed",
|
||||
}
|
||||
else:
|
||||
messages.append(
|
||||
{
|
||||
"type": "function_call",
|
||||
"name": tool_call.tool_name,
|
||||
"arguments": json.dumps(tool_call.tool_args),
|
||||
"call_id": tool_call.id,
|
||||
}
|
||||
)
|
||||
|
||||
if content.thinking_content:
|
||||
reasoning_summary.append(content.thinking_content)
|
||||
|
||||
if isinstance(content.native, ResponseReasoningItem):
|
||||
messages.append(
|
||||
{
|
||||
"type": "reasoning",
|
||||
"id": content.native.id,
|
||||
"summary": (
|
||||
[
|
||||
{
|
||||
"type": "summary_text",
|
||||
"text": summary,
|
||||
}
|
||||
for summary in reasoning_summary
|
||||
]
|
||||
if content.thinking_content
|
||||
else []
|
||||
),
|
||||
"encrypted_content": content.native.encrypted_content,
|
||||
}
|
||||
)
|
||||
reasoning_summary = []
|
||||
|
||||
elif isinstance(content.native, ImageGenerationCall):
|
||||
messages.append(
|
||||
cast(ImageGenerationCallParam, content.native.to_dict())
|
||||
)
|
||||
|
||||
return messages
|
||||
|
||||
|
||||
def _format_tool(
|
||||
tool: llm.Tool,
|
||||
custom_serializer: Callable[[Any], Any] | None,
|
||||
) -> ToolParam:
|
||||
"""Format a Home Assistant tool for the OpenAI Responses API."""
|
||||
parameters = convert(tool.parameters, custom_serializer=custom_serializer)
|
||||
|
||||
spec: FunctionToolParam = {
|
||||
"type": "function",
|
||||
"name": tool.name,
|
||||
"strict": False,
|
||||
"description": tool.description,
|
||||
"parameters": parameters,
|
||||
}
|
||||
|
||||
return spec
|
||||
|
||||
|
||||
def _adjust_schema(schema: dict[str, Any]) -> None:
|
||||
"""Adjust the schema to be compatible with OpenAI API."""
|
||||
if schema["type"] == "object":
|
||||
schema.setdefault("strict", True)
|
||||
schema.setdefault("additionalProperties", False)
|
||||
if "properties" not in schema:
|
||||
return
|
||||
|
||||
if "required" not in schema:
|
||||
schema["required"] = []
|
||||
|
||||
# Ensure all properties are required
|
||||
for prop, prop_info in schema["properties"].items():
|
||||
_adjust_schema(prop_info)
|
||||
if prop not in schema["required"]:
|
||||
prop_info["type"] = [prop_info["type"], "null"]
|
||||
schema["required"].append(prop)
|
||||
|
||||
elif schema["type"] == "array":
|
||||
if "items" not in schema:
|
||||
return
|
||||
|
||||
_adjust_schema(schema["items"])
|
||||
|
||||
|
||||
def _format_structured_output(
|
||||
schema: vol.Schema, llm_api: llm.APIInstance | None
|
||||
) -> dict[str, Any]:
|
||||
"""Format the schema to be compatible with OpenAI API."""
|
||||
result: dict[str, Any] = convert(
|
||||
schema,
|
||||
custom_serializer=(
|
||||
llm_api.custom_serializer if llm_api else llm.selector_serializer
|
||||
),
|
||||
)
|
||||
|
||||
_ensure_schema_constraints(result)
|
||||
|
||||
return result
|
||||
|
||||
|
||||
def _ensure_schema_constraints(schema: dict[str, Any]) -> None:
|
||||
"""Ensure generated schemas match the Responses API expectations."""
|
||||
schema_type = schema.get("type")
|
||||
|
||||
if schema_type == "object":
|
||||
schema.setdefault("additionalProperties", False)
|
||||
properties = schema.get("properties")
|
||||
if isinstance(properties, dict):
|
||||
for property_schema in properties.values():
|
||||
if isinstance(property_schema, dict):
|
||||
_ensure_schema_constraints(property_schema)
|
||||
elif schema_type == "array":
|
||||
items = schema.get("items")
|
||||
if isinstance(items, dict):
|
||||
_ensure_schema_constraints(items)
|
||||
|
||||
|
||||
# Borrowed and adapted from openai_conversation component
|
||||
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
|
||||
chat_log: conversation.ChatLog,
|
||||
stream: Any,
|
||||
remove_citations: bool = False,
|
||||
) -> AsyncGenerator[
|
||||
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
|
||||
]:
|
||||
"""Transform stream result into HA format."""
|
||||
last_summary_index = None
|
||||
last_role: Literal["assistant", "tool_result"] | None = None
|
||||
current_tool_call: ResponseFunctionToolCall | None = None
|
||||
|
||||
# Non-reasoning models don't follow our request to remove citations, so we remove
|
||||
# them manually here. They always follow the same pattern: the citation is always
|
||||
# in parentheses in Markdown format, the citation is always in a single delta event,
|
||||
# and sometimes the closing parenthesis is split into a separate delta event.
|
||||
remove_parentheses: bool = False
|
||||
citation_regexp = re.compile(r"\(\[([^\]]+)\]\((https?:\/\/[^\)]+)\)")
|
||||
|
||||
async for event in stream:
|
||||
event_type = getattr(event, "type", None)
|
||||
event_item = getattr(event, "item", None)
|
||||
event_item_type = getattr(event_item, "type", None) if event_item else None
|
||||
|
||||
_LOGGER.debug(
|
||||
"Event[%s] | item: %s",
|
||||
event_type,
|
||||
event_item_type,
|
||||
)
|
||||
|
||||
if event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_ADDED:
|
||||
# Detect function_call even when it's a BaseLiteLLMOpenAIResponseObject
|
||||
if event_item_type == ResponseItemType.FUNCTION_CALL:
|
||||
# OpenAI has tool calls as individual events
|
||||
# while HA puts tool calls inside the assistant message.
|
||||
# We turn them into individual assistant content for HA
|
||||
# to ensure that tools are called as soon as possible.
|
||||
yield {"role": "assistant"}
|
||||
last_role = "assistant"
|
||||
last_summary_index = None
|
||||
current_tool_call = cast(ResponseFunctionToolCall, event.item)
|
||||
elif (
|
||||
event_item_type == ResponseItemType.MESSAGE
|
||||
or (
|
||||
event_item_type == ResponseItemType.REASONING
|
||||
and last_summary_index is not None
|
||||
) # Subsequent ResponseReasoningItem
|
||||
or last_role != "assistant"
|
||||
):
|
||||
yield {"role": "assistant"}
|
||||
last_role = "assistant"
|
||||
last_summary_index = None
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_DONE:
|
||||
if event_item_type == ResponseItemType.REASONING:
|
||||
encrypted_content = getattr(event.item, "encrypted_content", None)
|
||||
summary = getattr(event.item, "summary", []) or []
|
||||
|
||||
yield {
|
||||
"native": ResponseReasoningItem(
|
||||
type="reasoning",
|
||||
id=event.item.id,
|
||||
summary=[],
|
||||
encrypted_content=encrypted_content,
|
||||
)
|
||||
}
|
||||
|
||||
last_summary_index = len(summary) - 1 if summary else None
|
||||
elif event_item_type == ResponseItemType.WEB_SEARCH_CALL:
|
||||
action = getattr(event.item, "action", None)
|
||||
if isinstance(action, dict):
|
||||
action_dict = action
|
||||
elif action is not None:
|
||||
action_dict = action.to_dict()
|
||||
else:
|
||||
action_dict = {}
|
||||
yield {
|
||||
"tool_calls": [
|
||||
llm.ToolInput(
|
||||
id=event.item.id,
|
||||
tool_name="web_search_call",
|
||||
tool_args={"action": action_dict},
|
||||
external=True,
|
||||
)
|
||||
]
|
||||
}
|
||||
yield {
|
||||
"role": "tool_result",
|
||||
"tool_call_id": event.item.id,
|
||||
"tool_name": "web_search_call",
|
||||
"tool_result": {"status": event.item.status},
|
||||
}
|
||||
last_role = "tool_result"
|
||||
elif event_item_type == ResponseItemType.IMAGE:
|
||||
yield {"native": event.item}
|
||||
last_summary_index = -1 # Trigger new assistant message on next turn
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.OUTPUT_TEXT_DELTA:
|
||||
data = event.delta
|
||||
if remove_parentheses:
|
||||
data = data.removeprefix(")")
|
||||
remove_parentheses = False
|
||||
elif remove_citations and (match := citation_regexp.search(data)):
|
||||
match_start, match_end = match.span()
|
||||
# remove leading space if any
|
||||
if data[match_start - 1 : match_start] == " ":
|
||||
match_start -= 1
|
||||
# remove closing parenthesis:
|
||||
if data[match_end : match_end + 1] == ")":
|
||||
match_end += 1
|
||||
else:
|
||||
remove_parentheses = True
|
||||
data = data[:match_start] + data[match_end:]
|
||||
if data:
|
||||
yield {"content": data}
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.REASONING_SUMMARY_TEXT_DELTA:
|
||||
# OpenAI can output several reasoning summaries
|
||||
# in a single ResponseReasoningItem. We split them as separate
|
||||
# AssistantContent messages. Only last of them will have
|
||||
# the reasoning `native` field set.
|
||||
if (
|
||||
last_summary_index is not None
|
||||
and event.summary_index != last_summary_index
|
||||
):
|
||||
yield {"role": "assistant"}
|
||||
last_role = "assistant"
|
||||
last_summary_index = event.summary_index
|
||||
yield {"thinking_content": event.delta}
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DELTA:
|
||||
if current_tool_call is not None:
|
||||
current_tool_call.arguments += event.delta
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.WEB_SEARCH_CALL_SEARCHING:
|
||||
yield {"role": "assistant"}
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DONE:
|
||||
if current_tool_call is not None:
|
||||
current_tool_call.status = "completed"
|
||||
|
||||
raw_args = json.loads(current_tool_call.arguments)
|
||||
for key in ("area", "floor"):
|
||||
if key in raw_args and not raw_args[key]:
|
||||
# Remove keys that are "" or None
|
||||
raw_args.pop(key, None)
|
||||
|
||||
yield {
|
||||
"tool_calls": [
|
||||
llm.ToolInput(
|
||||
id=current_tool_call.call_id,
|
||||
tool_name=current_tool_call.name,
|
||||
tool_args=raw_args,
|
||||
)
|
||||
]
|
||||
}
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.RESPONSE_COMPLETED:
|
||||
if event.response.usage is not None:
|
||||
chat_log.async_trace(
|
||||
{
|
||||
"stats": {
|
||||
"input_tokens": event.response.usage.input_tokens,
|
||||
"output_tokens": event.response.usage.output_tokens,
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.RESPONSE_INCOMPLETE:
|
||||
if event.response.usage is not None:
|
||||
chat_log.async_trace(
|
||||
{
|
||||
"stats": {
|
||||
"input_tokens": event.response.usage.input_tokens,
|
||||
"output_tokens": event.response.usage.output_tokens,
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
if (
|
||||
event.response.incomplete_details
|
||||
and event.response.incomplete_details.reason
|
||||
):
|
||||
reason: str = event.response.incomplete_details.reason
|
||||
else:
|
||||
reason = "unknown reason"
|
||||
|
||||
if reason == "max_output_tokens":
|
||||
reason = "max output tokens reached"
|
||||
elif reason == "content_filter":
|
||||
reason = "content filter triggered"
|
||||
|
||||
raise HomeAssistantError(f"OpenAI response incomplete: {reason}")
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.RESPONSE_FAILED:
|
||||
if event.response.usage is not None:
|
||||
chat_log.async_trace(
|
||||
{
|
||||
"stats": {
|
||||
"input_tokens": event.response.usage.input_tokens,
|
||||
"output_tokens": event.response.usage.output_tokens,
|
||||
}
|
||||
}
|
||||
)
|
||||
reason = "unknown reason"
|
||||
if event.response.error is not None:
|
||||
reason = event.response.error.message
|
||||
raise HomeAssistantError(f"OpenAI response failed: {reason}")
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.ERROR:
|
||||
raise HomeAssistantError(f"OpenAI response error: {event.message}")
|
||||
|
||||
|
||||
class BaseCloudLLMEntity(Entity):
|
||||
"""Cloud LLM conversation agent."""
|
||||
|
||||
def __init__(self, cloud: Cloud[CloudClient], config_entry: ConfigEntry) -> None:
|
||||
"""Initialize the entity."""
|
||||
self._cloud = cloud
|
||||
self._entry = config_entry
|
||||
|
||||
async def _prepare_chat_for_generation(
|
||||
self,
|
||||
chat_log: conversation.ChatLog,
|
||||
messages: ResponseInputParam,
|
||||
response_format: dict[str, Any] | None = None,
|
||||
) -> dict[str, Any]:
|
||||
"""Prepare kwargs for Cloud LLM from the chat log."""
|
||||
|
||||
last_content: Any = chat_log.content[-1]
|
||||
if last_content.role == "user" and last_content.attachments:
|
||||
files = await self._async_prepare_files_for_prompt(last_content.attachments)
|
||||
current_content = last_content.content
|
||||
last_content = [*(current_content or []), *files]
|
||||
|
||||
tools: list[ToolParam] = []
|
||||
tool_choice: str | None = None
|
||||
|
||||
if chat_log.llm_api:
|
||||
ha_tools: list[ToolParam] = [
|
||||
_format_tool(tool, chat_log.llm_api.custom_serializer)
|
||||
for tool in chat_log.llm_api.tools
|
||||
]
|
||||
|
||||
if ha_tools:
|
||||
if not chat_log.unresponded_tool_results:
|
||||
tools = ha_tools
|
||||
tool_choice = "auto"
|
||||
else:
|
||||
tools = []
|
||||
tool_choice = "none"
|
||||
|
||||
web_search = WebSearchToolParam(
|
||||
type="web_search",
|
||||
search_context_size="medium",
|
||||
)
|
||||
tools.append(web_search)
|
||||
|
||||
response_kwargs: dict[str, Any] = {
|
||||
"messages": messages,
|
||||
"conversation_id": chat_log.conversation_id,
|
||||
}
|
||||
|
||||
if response_format is not None:
|
||||
response_kwargs["response_format"] = response_format
|
||||
if tools is not None:
|
||||
response_kwargs["tools"] = tools
|
||||
if tool_choice is not None:
|
||||
response_kwargs["tool_choice"] = tool_choice
|
||||
|
||||
response_kwargs["stream"] = True
|
||||
|
||||
return response_kwargs
|
||||
|
||||
async def _async_prepare_files_for_prompt(
|
||||
self,
|
||||
attachments: list[conversation.Attachment],
|
||||
) -> list[dict[str, Any]]:
|
||||
"""Prepare files for multimodal prompts."""
|
||||
|
||||
def prepare() -> list[dict[str, Any]]:
|
||||
content: list[dict[str, Any]] = []
|
||||
for attachment in attachments:
|
||||
mime_type = attachment.mime_type
|
||||
path = attachment.path
|
||||
if not path.exists():
|
||||
raise HomeAssistantError(f"`{path}` does not exist")
|
||||
|
||||
data = base64.b64encode(path.read_bytes()).decode("utf-8")
|
||||
if mime_type and mime_type.startswith("image/"):
|
||||
content.append(
|
||||
{
|
||||
"type": "input_image",
|
||||
"image_url": f"data:{mime_type};base64,{data}",
|
||||
"detail": "auto",
|
||||
}
|
||||
)
|
||||
elif mime_type and mime_type.startswith("application/pdf"):
|
||||
content.append(
|
||||
{
|
||||
"type": "input_file",
|
||||
"filename": str(path.name),
|
||||
"file_data": f"data:{mime_type};base64,{data}",
|
||||
}
|
||||
)
|
||||
else:
|
||||
raise HomeAssistantError(
|
||||
"Only images and PDF are currently supported as attachments"
|
||||
)
|
||||
|
||||
return content
|
||||
|
||||
return await self.hass.async_add_executor_job(prepare)
|
||||
|
||||
async def _async_handle_chat_log(
|
||||
self,
|
||||
type: Literal["ai_task", "conversation"],
|
||||
chat_log: conversation.ChatLog,
|
||||
structure_name: str | None = None,
|
||||
structure: vol.Schema | None = None,
|
||||
) -> None:
|
||||
"""Generate a response for the chat log."""
|
||||
|
||||
for _ in range(_MAX_TOOL_ITERATIONS):
|
||||
response_format: dict[str, Any] | None = None
|
||||
if structure and structure_name:
|
||||
response_format = {
|
||||
"type": "json_schema",
|
||||
"json_schema": {
|
||||
"name": slugify(structure_name),
|
||||
"schema": _format_structured_output(
|
||||
structure, chat_log.llm_api
|
||||
),
|
||||
"strict": True,
|
||||
},
|
||||
}
|
||||
|
||||
messages = _convert_content_to_param(chat_log.content)
|
||||
|
||||
response_kwargs = await self._prepare_chat_for_generation(
|
||||
chat_log,
|
||||
messages,
|
||||
response_format,
|
||||
)
|
||||
|
||||
try:
|
||||
if type == "conversation":
|
||||
raw_stream = await self._cloud.llm.async_process_conversation(
|
||||
**response_kwargs,
|
||||
)
|
||||
else:
|
||||
raw_stream = await self._cloud.llm.async_generate_data(
|
||||
**response_kwargs,
|
||||
)
|
||||
|
||||
messages.extend(
|
||||
_convert_content_to_param(
|
||||
[
|
||||
content
|
||||
async for content in chat_log.async_add_delta_content_stream(
|
||||
self.entity_id,
|
||||
_transform_stream(
|
||||
chat_log,
|
||||
raw_stream,
|
||||
True,
|
||||
),
|
||||
)
|
||||
]
|
||||
)
|
||||
)
|
||||
|
||||
except LLMAuthenticationError as err:
|
||||
raise ConfigEntryAuthFailed("Cloud LLM authentication failed") from err
|
||||
except LLMRateLimitError as err:
|
||||
raise HomeAssistantError("Cloud LLM is rate limited") from err
|
||||
except LLMResponseError as err:
|
||||
raise HomeAssistantError(str(err)) from err
|
||||
except LLMServiceError as err:
|
||||
raise HomeAssistantError("Error talking to Cloud LLM") from err
|
||||
except LLMError as err:
|
||||
raise HomeAssistantError(str(err)) from err
|
||||
|
||||
if not chat_log.unresponded_tool_results:
|
||||
break
|
||||
@@ -1,5 +1,7 @@
|
||||
"""Helpers for the cloud component."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import deque
|
||||
import logging
|
||||
|
||||
|
||||
@@ -13,6 +13,6 @@
|
||||
"integration_type": "system",
|
||||
"iot_class": "cloud_push",
|
||||
"loggers": ["acme", "hass_nabucasa", "snitun"],
|
||||
"requirements": ["hass-nabucasa==1.5.1"],
|
||||
"requirements": ["hass-nabucasa==1.6.1"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
|
||||
@@ -1,4 +1,11 @@
|
||||
{
|
||||
"entity": {
|
||||
"ai_task": {
|
||||
"cloud_ai": {
|
||||
"name": "Home Assistant Cloud AI"
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"backup_size_too_large": {
|
||||
"message": "The backup size of {size}GB is too large to be uploaded to Home Assistant Cloud."
|
||||
|
||||
@@ -7,6 +7,7 @@ from collections.abc import AsyncGenerator, AsyncIterable, Callable, Generator
|
||||
from contextlib import contextmanager
|
||||
from contextvars import ContextVar
|
||||
from dataclasses import asdict, dataclass, field, replace
|
||||
from datetime import datetime
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any, Literal, TypedDict, cast
|
||||
@@ -16,14 +17,18 @@ import voluptuous as vol
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.exceptions import HomeAssistantError, TemplateError
|
||||
from homeassistant.helpers import chat_session, frame, intent, llm, template
|
||||
from homeassistant.util.dt import utcnow
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
from homeassistant.util.json import JsonObjectType
|
||||
|
||||
from . import trace
|
||||
from .const import ChatLogEventType
|
||||
from .models import ConversationInput, ConversationResult
|
||||
|
||||
DATA_CHAT_LOGS: HassKey[dict[str, ChatLog]] = HassKey("conversation_chat_logs")
|
||||
|
||||
DATA_SUBSCRIPTIONS: HassKey[
|
||||
list[Callable[[str, ChatLogEventType, dict[str, Any]], None]]
|
||||
] = HassKey("conversation_chat_log_subscriptions")
|
||||
LOGGER = logging.getLogger(__name__)
|
||||
|
||||
current_chat_log: ContextVar[ChatLog | None] = ContextVar(
|
||||
@@ -31,6 +36,40 @@ current_chat_log: ContextVar[ChatLog | None] = ContextVar(
|
||||
)
|
||||
|
||||
|
||||
@callback
|
||||
def async_subscribe_chat_logs(
|
||||
hass: HomeAssistant,
|
||||
callback_func: Callable[[str, ChatLogEventType, dict[str, Any]], None],
|
||||
) -> Callable[[], None]:
|
||||
"""Subscribe to all chat logs."""
|
||||
subscriptions = hass.data.get(DATA_SUBSCRIPTIONS)
|
||||
if subscriptions is None:
|
||||
subscriptions = []
|
||||
hass.data[DATA_SUBSCRIPTIONS] = subscriptions
|
||||
|
||||
subscriptions.append(callback_func)
|
||||
|
||||
@callback
|
||||
def unsubscribe() -> None:
|
||||
"""Unsubscribe from chat logs."""
|
||||
subscriptions.remove(callback_func)
|
||||
|
||||
return unsubscribe
|
||||
|
||||
|
||||
@callback
|
||||
def _async_notify_subscribers(
|
||||
hass: HomeAssistant,
|
||||
conversation_id: str,
|
||||
event_type: ChatLogEventType,
|
||||
data: dict[str, Any],
|
||||
) -> None:
|
||||
"""Notify subscribers of a chat log event."""
|
||||
if subscriptions := hass.data.get(DATA_SUBSCRIPTIONS):
|
||||
for callback_func in subscriptions:
|
||||
callback_func(conversation_id, event_type, data)
|
||||
|
||||
|
||||
@contextmanager
|
||||
def async_get_chat_log(
|
||||
hass: HomeAssistant,
|
||||
@@ -63,6 +102,8 @@ def async_get_chat_log(
|
||||
all_chat_logs = {}
|
||||
hass.data[DATA_CHAT_LOGS] = all_chat_logs
|
||||
|
||||
is_new_log = session.conversation_id not in all_chat_logs
|
||||
|
||||
if chat_log := all_chat_logs.get(session.conversation_id):
|
||||
chat_log = replace(chat_log, content=chat_log.content.copy())
|
||||
else:
|
||||
@@ -71,6 +112,15 @@ def async_get_chat_log(
|
||||
if chat_log_delta_listener:
|
||||
chat_log.delta_listener = chat_log_delta_listener
|
||||
|
||||
# Fire CREATED event for new chat logs before any content is added
|
||||
if is_new_log:
|
||||
_async_notify_subscribers(
|
||||
hass,
|
||||
session.conversation_id,
|
||||
ChatLogEventType.CREATED,
|
||||
{"chat_log": chat_log.as_dict()},
|
||||
)
|
||||
|
||||
if user_input is not None:
|
||||
chat_log.async_add_user_content(UserContent(content=user_input.text))
|
||||
|
||||
@@ -84,14 +134,28 @@ def async_get_chat_log(
|
||||
LOGGER.debug(
|
||||
"Chat Log opened but no assistant message was added, ignoring update"
|
||||
)
|
||||
# If this was a new log but nothing was added, fire DELETED to clean up
|
||||
if is_new_log:
|
||||
_async_notify_subscribers(
|
||||
hass,
|
||||
session.conversation_id,
|
||||
ChatLogEventType.DELETED,
|
||||
{},
|
||||
)
|
||||
return
|
||||
|
||||
if session.conversation_id not in all_chat_logs:
|
||||
if is_new_log:
|
||||
|
||||
@callback
|
||||
def do_cleanup() -> None:
|
||||
"""Handle cleanup."""
|
||||
all_chat_logs.pop(session.conversation_id)
|
||||
_async_notify_subscribers(
|
||||
hass,
|
||||
session.conversation_id,
|
||||
ChatLogEventType.DELETED,
|
||||
{},
|
||||
)
|
||||
|
||||
session.async_on_cleanup(do_cleanup)
|
||||
|
||||
@@ -100,6 +164,16 @@ def async_get_chat_log(
|
||||
|
||||
all_chat_logs[session.conversation_id] = chat_log
|
||||
|
||||
# For new logs, CREATED was already fired before content was added
|
||||
# For existing logs, fire UPDATED
|
||||
if not is_new_log:
|
||||
_async_notify_subscribers(
|
||||
hass,
|
||||
session.conversation_id,
|
||||
ChatLogEventType.UPDATED,
|
||||
{"chat_log": chat_log.as_dict()},
|
||||
)
|
||||
|
||||
|
||||
class ConverseError(HomeAssistantError):
|
||||
"""Error during initialization of conversation.
|
||||
@@ -129,6 +203,15 @@ class SystemContent:
|
||||
|
||||
role: Literal["system"] = field(init=False, default="system")
|
||||
content: str
|
||||
created: datetime = field(init=False, default_factory=utcnow)
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the content."""
|
||||
return {
|
||||
"role": self.role,
|
||||
"content": self.content,
|
||||
"created": self.created,
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -138,6 +221,20 @@ class UserContent:
|
||||
role: Literal["user"] = field(init=False, default="user")
|
||||
content: str
|
||||
attachments: list[Attachment] | None = field(default=None)
|
||||
created: datetime = field(init=False, default_factory=utcnow)
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the content."""
|
||||
result: dict[str, Any] = {
|
||||
"role": self.role,
|
||||
"content": self.content,
|
||||
"created": self.created,
|
||||
}
|
||||
if self.attachments:
|
||||
result["attachments"] = [
|
||||
attachment.as_dict() for attachment in self.attachments
|
||||
]
|
||||
return result
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -153,6 +250,14 @@ class Attachment:
|
||||
path: Path
|
||||
"""Path to the attachment on disk."""
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the attachment."""
|
||||
return {
|
||||
"media_content_id": self.media_content_id,
|
||||
"mime_type": self.mime_type,
|
||||
"path": str(self.path),
|
||||
}
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
class AssistantContent:
|
||||
@@ -164,6 +269,22 @@ class AssistantContent:
|
||||
thinking_content: str | None = None
|
||||
tool_calls: list[llm.ToolInput] | None = None
|
||||
native: Any = None
|
||||
created: datetime = field(init=False, default_factory=utcnow)
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the content."""
|
||||
result: dict[str, Any] = {
|
||||
"role": self.role,
|
||||
"agent_id": self.agent_id,
|
||||
"created": self.created,
|
||||
}
|
||||
if self.content:
|
||||
result["content"] = self.content
|
||||
if self.thinking_content:
|
||||
result["thinking_content"] = self.thinking_content
|
||||
if self.tool_calls:
|
||||
result["tool_calls"] = self.tool_calls
|
||||
return result
|
||||
|
||||
|
||||
@dataclass(frozen=True)
|
||||
@@ -175,6 +296,18 @@ class ToolResultContent:
|
||||
tool_call_id: str
|
||||
tool_name: str
|
||||
tool_result: JsonObjectType
|
||||
created: datetime = field(init=False, default_factory=utcnow)
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the content."""
|
||||
return {
|
||||
"role": self.role,
|
||||
"agent_id": self.agent_id,
|
||||
"tool_call_id": self.tool_call_id,
|
||||
"tool_name": self.tool_name,
|
||||
"tool_result": self.tool_result,
|
||||
"created": self.created,
|
||||
}
|
||||
|
||||
|
||||
type Content = SystemContent | UserContent | AssistantContent | ToolResultContent
|
||||
@@ -210,6 +343,16 @@ class ChatLog:
|
||||
llm_api: llm.APIInstance | None = None
|
||||
delta_listener: Callable[[ChatLog, dict], None] | None = None
|
||||
llm_input_provided_index = 0
|
||||
created: datetime = field(init=False, default_factory=utcnow)
|
||||
|
||||
def as_dict(self) -> dict[str, Any]:
|
||||
"""Return a dictionary representation of the chat log."""
|
||||
return {
|
||||
"conversation_id": self.conversation_id,
|
||||
"continue_conversation": self.continue_conversation,
|
||||
"content": [c.as_dict() for c in self.content],
|
||||
"created": self.created,
|
||||
}
|
||||
|
||||
@property
|
||||
def continue_conversation(self) -> bool:
|
||||
@@ -241,6 +384,12 @@ class ChatLog:
|
||||
"""Add user content to the log."""
|
||||
LOGGER.debug("Adding user content: %s", content)
|
||||
self.content.append(content)
|
||||
_async_notify_subscribers(
|
||||
self.hass,
|
||||
self.conversation_id,
|
||||
ChatLogEventType.CONTENT_ADDED,
|
||||
{"content": content.as_dict()},
|
||||
)
|
||||
|
||||
@callback
|
||||
def async_add_assistant_content_without_tools(
|
||||
@@ -259,6 +408,12 @@ class ChatLog:
|
||||
):
|
||||
raise ValueError("Non-external tool calls not allowed")
|
||||
self.content.append(content)
|
||||
_async_notify_subscribers(
|
||||
self.hass,
|
||||
self.conversation_id,
|
||||
ChatLogEventType.CONTENT_ADDED,
|
||||
{"content": content.as_dict()},
|
||||
)
|
||||
|
||||
async def async_add_assistant_content(
|
||||
self,
|
||||
@@ -317,6 +472,14 @@ class ChatLog:
|
||||
tool_result=tool_result,
|
||||
)
|
||||
self.content.append(response_content)
|
||||
_async_notify_subscribers(
|
||||
self.hass,
|
||||
self.conversation_id,
|
||||
ChatLogEventType.CONTENT_ADDED,
|
||||
{
|
||||
"content": response_content.as_dict(),
|
||||
},
|
||||
)
|
||||
yield response_content
|
||||
|
||||
async def async_add_delta_content_stream(
|
||||
@@ -593,6 +756,12 @@ class ChatLog:
|
||||
self.llm_api = llm_api
|
||||
self.extra_system_prompt = extra_system_prompt
|
||||
self.content[0] = SystemContent(content=prompt)
|
||||
_async_notify_subscribers(
|
||||
self.hass,
|
||||
self.conversation_id,
|
||||
ChatLogEventType.UPDATED,
|
||||
{"chat_log": self.as_dict()},
|
||||
)
|
||||
|
||||
LOGGER.debug("Prompt: %s", self.content)
|
||||
LOGGER.debug("Tools: %s", self.llm_api.tools if self.llm_api else None)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import IntFlag
|
||||
from enum import IntFlag, StrEnum
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from homeassistant.util.hass_dict import HassKey
|
||||
@@ -34,3 +34,13 @@ class ConversationEntityFeature(IntFlag):
|
||||
|
||||
METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
|
||||
METADATA_CUSTOM_FILE = "hass_custom_file"
|
||||
|
||||
|
||||
class ChatLogEventType(StrEnum):
|
||||
"""Chat log event type."""
|
||||
|
||||
INITIAL_STATE = "initial_state"
|
||||
CREATED = "created"
|
||||
UPDATED = "updated"
|
||||
DELETED = "deleted"
|
||||
CONTENT_ADDED = "content_added"
|
||||
|
||||
@@ -12,6 +12,7 @@ from homeassistant.components import http, websocket_api
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.const import MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.chat_session import async_get_chat_session
|
||||
from homeassistant.util import language as language_util
|
||||
|
||||
from .agent_manager import (
|
||||
@@ -20,7 +21,8 @@ from .agent_manager import (
|
||||
async_get_agent,
|
||||
get_agent_manager,
|
||||
)
|
||||
from .const import DATA_COMPONENT
|
||||
from .chat_log import DATA_CHAT_LOGS, async_get_chat_log, async_subscribe_chat_logs
|
||||
from .const import DATA_COMPONENT, ChatLogEventType
|
||||
from .entity import ConversationEntity
|
||||
from .models import ConversationInput
|
||||
|
||||
@@ -35,6 +37,8 @@ def async_setup(hass: HomeAssistant) -> None:
|
||||
websocket_api.async_register_command(hass, websocket_list_sentences)
|
||||
websocket_api.async_register_command(hass, websocket_hass_agent_debug)
|
||||
websocket_api.async_register_command(hass, websocket_hass_agent_language_scores)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_chat_log)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_chat_log_index)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
@@ -265,3 +269,114 @@ class ConversationProcessView(http.HomeAssistantView):
|
||||
)
|
||||
|
||||
return self.json(result.as_dict())
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "conversation/chat_log/subscribe",
|
||||
vol.Required("conversation_id"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
def websocket_subscribe_chat_log(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Subscribe to a chat log."""
|
||||
msg_id = msg["id"]
|
||||
subscribed_conversation = msg["conversation_id"]
|
||||
|
||||
chat_logs = hass.data.get(DATA_CHAT_LOGS)
|
||||
|
||||
if not chat_logs or subscribed_conversation not in chat_logs:
|
||||
connection.send_error(
|
||||
msg_id,
|
||||
websocket_api.ERR_NOT_FOUND,
|
||||
"Conversation chat log not found",
|
||||
)
|
||||
return
|
||||
|
||||
@callback
|
||||
def forward_events(conversation_id: str, event_type: str, data: dict) -> None:
|
||||
"""Forward chat log events to websocket connection."""
|
||||
if conversation_id != subscribed_conversation:
|
||||
return
|
||||
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"conversation_id": conversation_id,
|
||||
"event_type": event_type,
|
||||
"data": data,
|
||||
},
|
||||
)
|
||||
|
||||
if event_type == ChatLogEventType.DELETED:
|
||||
unsubscribe()
|
||||
del connection.subscriptions[msg_id]
|
||||
|
||||
unsubscribe = async_subscribe_chat_logs(hass, forward_events)
|
||||
connection.subscriptions[msg_id] = unsubscribe
|
||||
connection.send_result(msg_id)
|
||||
|
||||
with (
|
||||
async_get_chat_session(hass, subscribed_conversation) as session,
|
||||
async_get_chat_log(hass, session) as chat_log,
|
||||
):
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"event_type": ChatLogEventType.INITIAL_STATE,
|
||||
"data": chat_log.as_dict(),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "conversation/chat_log/subscribe_index",
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
def websocket_subscribe_chat_log_index(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Subscribe to a chat log."""
|
||||
msg_id = msg["id"]
|
||||
|
||||
@callback
|
||||
def forward_events(
|
||||
conversation_id: str, event_type: ChatLogEventType, data: dict
|
||||
) -> None:
|
||||
"""Forward chat log events to websocket connection."""
|
||||
if event_type not in (ChatLogEventType.CREATED, ChatLogEventType.DELETED):
|
||||
return
|
||||
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"conversation_id": conversation_id,
|
||||
"event_type": event_type,
|
||||
"data": data,
|
||||
},
|
||||
)
|
||||
|
||||
unsubscribe = async_subscribe_chat_logs(hass, forward_events)
|
||||
connection.subscriptions[msg["id"]] = unsubscribe
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
chat_logs = hass.data.get(DATA_CHAT_LOGS)
|
||||
|
||||
if not chat_logs:
|
||||
return
|
||||
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"event_type": ChatLogEventType.INITIAL_STATE,
|
||||
"data": [c.as_dict() for c in chat_logs.values()],
|
||||
},
|
||||
)
|
||||
|
||||
@@ -6,5 +6,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/conversation",
|
||||
"integration_type": "entity",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.7"]
|
||||
"requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.24"]
|
||||
}
|
||||
|
||||
@@ -108,5 +108,34 @@
|
||||
"toggle_cover_tilt": {
|
||||
"service": "mdi:arrow-top-right-bottom-left"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"awning_opened": {
|
||||
"trigger": "mdi:awning-outline"
|
||||
},
|
||||
"blind_opened": {
|
||||
"trigger": "mdi:blinds-horizontal"
|
||||
},
|
||||
"curtain_opened": {
|
||||
"trigger": "mdi:curtains"
|
||||
},
|
||||
"door_opened": {
|
||||
"trigger": "mdi:door-open"
|
||||
},
|
||||
"garage_opened": {
|
||||
"trigger": "mdi:garage-open"
|
||||
},
|
||||
"gate_opened": {
|
||||
"trigger": "mdi:gate-open"
|
||||
},
|
||||
"shade_opened": {
|
||||
"trigger": "mdi:roller-shade"
|
||||
},
|
||||
"shutter_opened": {
|
||||
"trigger": "mdi:window-shutter-open"
|
||||
},
|
||||
"window_opened": {
|
||||
"trigger": "mdi:window-open"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,16 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description_awning": "The behavior of the targeted awnings to trigger on.",
|
||||
"trigger_behavior_description_blind": "The behavior of the targeted blinds to trigger on.",
|
||||
"trigger_behavior_description_curtain": "The behavior of the targeted curtains to trigger on.",
|
||||
"trigger_behavior_description_door": "The behavior of the targeted doors to trigger on.",
|
||||
"trigger_behavior_description_garage": "The behavior of the targeted garage doors to trigger on.",
|
||||
"trigger_behavior_description_gate": "The behavior of the targeted gates to trigger on.",
|
||||
"trigger_behavior_description_shade": "The behavior of the targeted shades to trigger on.",
|
||||
"trigger_behavior_description_shutter": "The behavior of the targeted shutters to trigger on.",
|
||||
"trigger_behavior_description_window": "The behavior of the targeted windows to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"close": "Close {entity_name}",
|
||||
@@ -82,6 +94,15 @@
|
||||
"name": "Window"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"close_cover": {
|
||||
"description": "Closes a cover.",
|
||||
@@ -136,5 +157,142 @@
|
||||
"name": "Toggle tilt"
|
||||
}
|
||||
},
|
||||
"title": "Cover"
|
||||
"title": "Cover",
|
||||
"triggers": {
|
||||
"awning_opened": {
|
||||
"description": "Triggers when an awning opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::awning_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_awning%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the awnings to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When an awning opens"
|
||||
},
|
||||
"blind_opened": {
|
||||
"description": "Triggers when a blind opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::blind_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_blind%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the blinds to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a blind opens"
|
||||
},
|
||||
"curtain_opened": {
|
||||
"description": "Triggers when a curtain opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::curtain_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_curtain%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the curtains to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a curtain opens"
|
||||
},
|
||||
"door_opened": {
|
||||
"description": "Triggers when a door opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::door_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_door%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the doors to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a door opens"
|
||||
},
|
||||
"garage_opened": {
|
||||
"description": "Triggers when a garage door opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::garage_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_garage%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the garage doors to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a garage door opens"
|
||||
},
|
||||
"gate_opened": {
|
||||
"description": "Triggers when a gate opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::gate_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_gate%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the gates to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a gate opens"
|
||||
},
|
||||
"shade_opened": {
|
||||
"description": "Triggers when a shade opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::shade_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_shade%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the shades to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a shade opens"
|
||||
},
|
||||
"shutter_opened": {
|
||||
"description": "Triggers when a shutter opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::shutter_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_shutter%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the shutters to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a shutter opens"
|
||||
},
|
||||
"window_opened": {
|
||||
"description": "Triggers when a window opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::window_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_window%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the windows to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a window opens"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
116
homeassistant/components/cover/trigger.py
Normal file
116
homeassistant/components/cover/trigger.py
Normal file
@@ -0,0 +1,116 @@
|
||||
"""Provides triggers for covers."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_OPTIONS
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity import get_device_class
|
||||
from homeassistant.helpers.trigger import (
|
||||
ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST,
|
||||
EntityTriggerBase,
|
||||
Trigger,
|
||||
TriggerConfig,
|
||||
)
|
||||
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
|
||||
|
||||
from . import ATTR_CURRENT_POSITION, CoverDeviceClass, CoverState
|
||||
from .const import DOMAIN
|
||||
|
||||
ATTR_FULLY_OPENED: Final = "fully_opened"
|
||||
|
||||
COVER_OPENED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend(
|
||||
{
|
||||
vol.Required(CONF_OPTIONS): {
|
||||
vol.Required(ATTR_FULLY_OPENED, default=False): bool,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def get_device_class_or_undefined(
|
||||
hass: HomeAssistant, entity_id: str
|
||||
) -> str | None | UndefinedType:
|
||||
"""Get the device class of an entity or UNDEFINED if not found."""
|
||||
try:
|
||||
return get_device_class(hass, entity_id)
|
||||
except HomeAssistantError:
|
||||
return UNDEFINED
|
||||
|
||||
|
||||
class CoverOpenedClosedTrigger(EntityTriggerBase):
|
||||
"""Class for cover opened and closed triggers."""
|
||||
|
||||
_attribute: str = ATTR_CURRENT_POSITION
|
||||
_attribute_value: int | None = None
|
||||
_device_class: CoverDeviceClass | None
|
||||
_domain: str = DOMAIN
|
||||
_to_states: set[str]
|
||||
|
||||
def is_to_state(self, state: State) -> bool:
|
||||
"""Check if the state matches the target state."""
|
||||
if state.state not in self._to_states:
|
||||
return False
|
||||
if (
|
||||
self._attribute_value is not None
|
||||
and (value := state.attributes.get(self._attribute)) is not None
|
||||
and value != self._attribute_value
|
||||
):
|
||||
return False
|
||||
return True
|
||||
|
||||
def entity_filter(self, entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
entities = super().entity_filter(entities)
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if get_device_class_or_undefined(self._hass, entity_id)
|
||||
== self._device_class
|
||||
}
|
||||
|
||||
|
||||
class CoverOpenedTrigger(CoverOpenedClosedTrigger):
|
||||
"""Class for cover opened triggers."""
|
||||
|
||||
_schema = COVER_OPENED_TRIGGER_SCHEMA
|
||||
_to_states = {CoverState.OPEN, CoverState.OPENING}
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize the state trigger."""
|
||||
super().__init__(hass, config)
|
||||
if self._options.get(ATTR_FULLY_OPENED):
|
||||
self._attribute_value = 100
|
||||
|
||||
|
||||
def make_cover_opened_trigger(
|
||||
device_class: CoverDeviceClass | None,
|
||||
) -> type[CoverOpenedTrigger]:
|
||||
"""Create an entity state attribute trigger class."""
|
||||
|
||||
class CustomTrigger(CoverOpenedTrigger):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_device_class = device_class
|
||||
|
||||
return CustomTrigger
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"awning_opened": make_cover_opened_trigger(CoverDeviceClass.AWNING),
|
||||
"blind_opened": make_cover_opened_trigger(CoverDeviceClass.BLIND),
|
||||
"curtain_opened": make_cover_opened_trigger(CoverDeviceClass.CURTAIN),
|
||||
"door_opened": make_cover_opened_trigger(CoverDeviceClass.DOOR),
|
||||
"garage_opened": make_cover_opened_trigger(CoverDeviceClass.GARAGE),
|
||||
"gate_opened": make_cover_opened_trigger(CoverDeviceClass.GATE),
|
||||
"shade_opened": make_cover_opened_trigger(CoverDeviceClass.SHADE),
|
||||
"shutter_opened": make_cover_opened_trigger(CoverDeviceClass.SHUTTER),
|
||||
"window_opened": make_cover_opened_trigger(CoverDeviceClass.WINDOW),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for covers."""
|
||||
return TRIGGERS
|
||||
79
homeassistant/components/cover/triggers.yaml
Normal file
79
homeassistant/components/cover/triggers.yaml
Normal file
@@ -0,0 +1,79 @@
|
||||
.trigger_common_fields: &trigger_common_fields
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
fully_opened:
|
||||
required: true
|
||||
default: false
|
||||
selector:
|
||||
boolean:
|
||||
|
||||
awning_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: awning
|
||||
|
||||
blind_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: blind
|
||||
|
||||
curtain_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: curtain
|
||||
|
||||
door_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: door
|
||||
|
||||
garage_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: garage
|
||||
|
||||
gate_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: gate
|
||||
|
||||
shade_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: shade
|
||||
|
||||
shutter_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: shutter
|
||||
|
||||
window_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: window
|
||||
@@ -6,5 +6,5 @@
|
||||
"integration_type": "service",
|
||||
"iot_class": "local_push",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["debugpy==1.8.16"]
|
||||
"requirements": ["debugpy==1.8.17"]
|
||||
}
|
||||
|
||||
@@ -278,11 +278,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElkM1ConfigEntry) -> boo
|
||||
for keypad in elk.keypads:
|
||||
keypad.add_callback(_keypad_changed)
|
||||
|
||||
sync_success = False
|
||||
try:
|
||||
if not await async_wait_for_elk_to_sync(elk, LOGIN_TIMEOUT, SYNC_TIMEOUT):
|
||||
return False
|
||||
await ElkSyncWaiter(elk, LOGIN_TIMEOUT, SYNC_TIMEOUT).async_wait()
|
||||
sync_success = True
|
||||
except LoginFailed:
|
||||
_LOGGER.error("ElkM1 login failed for %s", conf[CONF_HOST])
|
||||
return False
|
||||
except TimeoutError as exc:
|
||||
raise ConfigEntryNotReady(f"Timed out connecting to {conf[CONF_HOST]}") from exc
|
||||
finally:
|
||||
if not sync_success:
|
||||
elk.disconnect()
|
||||
|
||||
elk_temp_unit = elk.panel.temperature_units
|
||||
if elk_temp_unit == "C":
|
||||
@@ -321,48 +328,75 @@ async def async_unload_entry(hass: HomeAssistant, entry: ElkM1ConfigEntry) -> bo
|
||||
return unload_ok
|
||||
|
||||
|
||||
async def async_wait_for_elk_to_sync(
|
||||
elk: Elk,
|
||||
login_timeout: int,
|
||||
sync_timeout: int,
|
||||
) -> bool:
|
||||
"""Wait until the elk has finished sync. Can fail login or timeout."""
|
||||
class LoginFailed(Exception):
|
||||
"""Raised when login to ElkM1 fails."""
|
||||
|
||||
sync_event = asyncio.Event()
|
||||
login_event = asyncio.Event()
|
||||
|
||||
success = True
|
||||
class ElkSyncWaiter:
|
||||
"""Wait for ElkM1 to sync."""
|
||||
|
||||
def login_status(succeeded: bool) -> None:
|
||||
nonlocal success
|
||||
def __init__(self, elk: Elk, login_timeout: int, sync_timeout: int) -> None:
|
||||
"""Initialize the sync waiter."""
|
||||
self._elk = elk
|
||||
self._login_timeout = login_timeout
|
||||
self._sync_timeout = sync_timeout
|
||||
self._loop = asyncio.get_running_loop()
|
||||
self._sync_future: asyncio.Future[None] = self._loop.create_future()
|
||||
self._login_future: asyncio.Future[None] = self._loop.create_future()
|
||||
|
||||
success = succeeded
|
||||
@callback
|
||||
def _async_set_future_if_not_done(self, future: asyncio.Future[None]) -> None:
|
||||
"""Set the future result if not already done."""
|
||||
if not future.done():
|
||||
future.set_result(None)
|
||||
|
||||
@callback
|
||||
def _async_login_status(self, succeeded: bool) -> None:
|
||||
"""Handle login status callback."""
|
||||
if succeeded:
|
||||
_LOGGER.debug("ElkM1 login succeeded")
|
||||
login_event.set()
|
||||
self._async_set_future_if_not_done(self._login_future)
|
||||
else:
|
||||
elk.disconnect()
|
||||
_LOGGER.error("ElkM1 login failed; invalid username or password")
|
||||
login_event.set()
|
||||
sync_event.set()
|
||||
self._async_set_exception_if_not_done(self._login_future, LoginFailed)
|
||||
|
||||
def sync_complete() -> None:
|
||||
sync_event.set()
|
||||
@callback
|
||||
def _async_set_exception_if_not_done(
|
||||
self, future: asyncio.Future[None], exception: type[Exception]
|
||||
) -> None:
|
||||
"""Set an exception on the future if not already done."""
|
||||
if not future.done():
|
||||
future.set_exception(exception())
|
||||
|
||||
@callback
|
||||
def _async_sync_complete(self) -> None:
|
||||
"""Handle sync complete callback."""
|
||||
self._async_set_future_if_not_done(self._sync_future)
|
||||
|
||||
async def async_wait(self) -> None:
|
||||
"""Wait for login and sync to complete.
|
||||
|
||||
Raises LoginFailed if login fails.
|
||||
Raises TimeoutError if login or sync times out.
|
||||
"""
|
||||
self._elk.add_handler("login", self._async_login_status)
|
||||
self._elk.add_handler("sync_complete", self._async_sync_complete)
|
||||
|
||||
elk.add_handler("login", login_status)
|
||||
elk.add_handler("sync_complete", sync_complete)
|
||||
for name, event, timeout in (
|
||||
("login", login_event, login_timeout),
|
||||
("sync_complete", sync_event, sync_timeout),
|
||||
):
|
||||
_LOGGER.debug("Waiting for %s event for %s seconds", name, timeout)
|
||||
try:
|
||||
async with asyncio.timeout(timeout):
|
||||
await event.wait()
|
||||
except TimeoutError:
|
||||
_LOGGER.debug("Timed out waiting for %s event", name)
|
||||
elk.disconnect()
|
||||
raise
|
||||
_LOGGER.debug("Received %s event", name)
|
||||
for name, future, timeout in (
|
||||
("login", self._login_future, self._login_timeout),
|
||||
("sync_complete", self._sync_future, self._sync_timeout),
|
||||
):
|
||||
_LOGGER.debug("Waiting for %s event for %s seconds", name, timeout)
|
||||
handle = self._loop.call_later(
|
||||
timeout, self._async_set_exception_if_not_done, future, TimeoutError
|
||||
)
|
||||
try:
|
||||
await future
|
||||
finally:
|
||||
handle.cancel()
|
||||
|
||||
return success
|
||||
_LOGGER.debug("Received %s event", name)
|
||||
finally:
|
||||
self._elk.remove_handler("login", self._async_login_status)
|
||||
self._elk.remove_handler("sync_complete", self._async_sync_complete)
|
||||
|
||||
@@ -25,7 +25,7 @@ from homeassistant.helpers.typing import DiscoveryInfoType, VolDictType
|
||||
from homeassistant.util import slugify
|
||||
from homeassistant.util.network import is_ip_address
|
||||
|
||||
from . import async_wait_for_elk_to_sync, hostname_from_url
|
||||
from . import ElkSyncWaiter, LoginFailed, hostname_from_url
|
||||
from .const import CONF_AUTO_CONFIGURE, DISCOVER_SCAN_TIMEOUT, DOMAIN, LOGIN_TIMEOUT
|
||||
from .discovery import (
|
||||
_short_mac,
|
||||
@@ -89,8 +89,9 @@ async def validate_input(data: dict[str, str], mac: str | None) -> dict[str, str
|
||||
elk.connect()
|
||||
|
||||
try:
|
||||
if not await async_wait_for_elk_to_sync(elk, LOGIN_TIMEOUT, VALIDATE_TIMEOUT):
|
||||
raise InvalidAuth
|
||||
await ElkSyncWaiter(elk, LOGIN_TIMEOUT, VALIDATE_TIMEOUT).async_wait()
|
||||
except LoginFailed as exc:
|
||||
raise InvalidAuth from exc
|
||||
finally:
|
||||
elk.disconnect()
|
||||
|
||||
|
||||
@@ -15,5 +15,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/elkm1",
|
||||
"iot_class": "local_push",
|
||||
"loggers": ["elkm1_lib"],
|
||||
"requirements": ["elkm1-lib==2.2.12"]
|
||||
"requirements": ["elkm1-lib==2.2.13"]
|
||||
}
|
||||
|
||||
@@ -5,7 +5,7 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
from collections import Counter
|
||||
from collections.abc import Awaitable, Callable
|
||||
from typing import Literal, NotRequired, TypedDict
|
||||
from typing import Any, Literal, NotRequired, TypedDict
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
@@ -15,6 +15,7 @@ from homeassistant.helpers import config_validation as cv, singleton, storage
|
||||
from .const import DOMAIN
|
||||
|
||||
STORAGE_VERSION = 1
|
||||
STORAGE_MINOR_VERSION = 2
|
||||
STORAGE_KEY = DOMAIN
|
||||
|
||||
|
||||
@@ -164,6 +165,7 @@ class EnergyPreferences(TypedDict):
|
||||
|
||||
energy_sources: list[SourceType]
|
||||
device_consumption: list[DeviceConsumption]
|
||||
device_consumption_water: NotRequired[list[DeviceConsumption]]
|
||||
|
||||
|
||||
class EnergyPreferencesUpdate(EnergyPreferences, total=False):
|
||||
@@ -328,14 +330,31 @@ DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
|
||||
)
|
||||
|
||||
|
||||
class _EnergyPreferencesStore(storage.Store[EnergyPreferences]):
|
||||
"""Energy preferences store with migration support."""
|
||||
|
||||
async def _async_migrate_func(
|
||||
self,
|
||||
old_major_version: int,
|
||||
old_minor_version: int,
|
||||
old_data: dict[str, Any],
|
||||
) -> dict[str, Any]:
|
||||
"""Migrate to the new version."""
|
||||
data = old_data
|
||||
if old_major_version == 1 and old_minor_version < 2:
|
||||
# Add device_consumption_water field if it doesn't exist
|
||||
data.setdefault("device_consumption_water", [])
|
||||
return data
|
||||
|
||||
|
||||
class EnergyManager:
|
||||
"""Manage the instance energy prefs."""
|
||||
|
||||
def __init__(self, hass: HomeAssistant) -> None:
|
||||
"""Initialize energy manager."""
|
||||
self._hass = hass
|
||||
self._store = storage.Store[EnergyPreferences](
|
||||
hass, STORAGE_VERSION, STORAGE_KEY
|
||||
self._store = _EnergyPreferencesStore(
|
||||
hass, STORAGE_VERSION, STORAGE_KEY, minor_version=STORAGE_MINOR_VERSION
|
||||
)
|
||||
self.data: EnergyPreferences | None = None
|
||||
self._update_listeners: list[Callable[[], Awaitable]] = []
|
||||
@@ -350,6 +369,7 @@ class EnergyManager:
|
||||
return {
|
||||
"energy_sources": [],
|
||||
"device_consumption": [],
|
||||
"device_consumption_water": [],
|
||||
}
|
||||
|
||||
async def async_update(self, update: EnergyPreferencesUpdate) -> None:
|
||||
@@ -362,6 +382,7 @@ class EnergyManager:
|
||||
for key in (
|
||||
"energy_sources",
|
||||
"device_consumption",
|
||||
"device_consumption_water",
|
||||
):
|
||||
if key in update:
|
||||
data[key] = update[key]
|
||||
|
||||
@@ -153,6 +153,9 @@ class EnergyPreferencesValidation:
|
||||
|
||||
energy_sources: list[ValidationIssues] = dataclasses.field(default_factory=list)
|
||||
device_consumption: list[ValidationIssues] = dataclasses.field(default_factory=list)
|
||||
device_consumption_water: list[ValidationIssues] = dataclasses.field(
|
||||
default_factory=list
|
||||
)
|
||||
|
||||
def as_dict(self) -> dict:
|
||||
"""Return dictionary version."""
|
||||
@@ -165,6 +168,10 @@ class EnergyPreferencesValidation:
|
||||
[dataclasses.asdict(issue) for issue in issues.issues.values()]
|
||||
for issues in self.device_consumption
|
||||
],
|
||||
"device_consumption_water": [
|
||||
[dataclasses.asdict(issue) for issue in issues.issues.values()]
|
||||
for issues in self.device_consumption_water
|
||||
],
|
||||
}
|
||||
|
||||
|
||||
@@ -742,6 +749,23 @@ async def async_validate(hass: HomeAssistant) -> EnergyPreferencesValidation:
|
||||
)
|
||||
)
|
||||
|
||||
for device in manager.data.get("device_consumption_water", []):
|
||||
device_result = ValidationIssues()
|
||||
result.device_consumption_water.append(device_result)
|
||||
wanted_statistics_metadata.add(device["stat_consumption"])
|
||||
validate_calls.append(
|
||||
functools.partial(
|
||||
_async_validate_usage_stat,
|
||||
hass,
|
||||
statistics_metadata,
|
||||
device["stat_consumption"],
|
||||
WATER_USAGE_DEVICE_CLASSES,
|
||||
WATER_USAGE_UNITS,
|
||||
WATER_UNIT_ERROR,
|
||||
device_result,
|
||||
)
|
||||
)
|
||||
|
||||
# Fetch the needed statistics metadata
|
||||
statistics_metadata.update(
|
||||
await recorder.get_instance(hass).async_add_executor_job(
|
||||
|
||||
@@ -129,6 +129,7 @@ def ws_get_prefs(
|
||||
vol.Required("type"): "energy/save_prefs",
|
||||
vol.Optional("energy_sources"): ENERGY_SOURCE_SCHEMA,
|
||||
vol.Optional("device_consumption"): [DEVICE_CONSUMPTION_SCHEMA],
|
||||
vol.Optional("device_consumption_water"): [DEVICE_CONSUMPTION_SCHEMA],
|
||||
}
|
||||
)
|
||||
@websocket_api.async_response
|
||||
|
||||
@@ -90,7 +90,7 @@ def async_static_info_updated(
|
||||
|
||||
# Create new entity if it doesn't exist
|
||||
if not old_info:
|
||||
entity = entity_type(entry_data, platform.domain, info, state_type)
|
||||
entity = entity_type(entry_data, info, state_type)
|
||||
add_entities.append(entity)
|
||||
continue
|
||||
|
||||
@@ -112,7 +112,7 @@ def async_static_info_updated(
|
||||
old_info.device_id,
|
||||
info.device_id,
|
||||
)
|
||||
entity = entity_type(entry_data, platform.domain, info, state_type)
|
||||
entity = entity_type(entry_data, info, state_type)
|
||||
add_entities.append(entity)
|
||||
continue
|
||||
|
||||
@@ -162,7 +162,7 @@ def async_static_info_updated(
|
||||
entry_data.async_signal_entity_removal(info_type, old_info.device_id, info.key)
|
||||
|
||||
# Create new entity with the new device_id
|
||||
add_entities.append(entity_type(entry_data, platform.domain, info, state_type))
|
||||
add_entities.append(entity_type(entry_data, info, state_type))
|
||||
|
||||
# Anything still in current_infos is now gone
|
||||
if current_infos:
|
||||
@@ -329,7 +329,6 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):
|
||||
def __init__(
|
||||
self,
|
||||
entry_data: RuntimeEntryData,
|
||||
domain: str,
|
||||
entity_info: EntityInfo,
|
||||
state_type: type[_StateT],
|
||||
) -> None:
|
||||
@@ -343,7 +342,6 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):
|
||||
self._state_type = state_type
|
||||
self._on_static_info_update(entity_info)
|
||||
|
||||
device_name = device_info.name
|
||||
# Determine the device connection based on whether this entity belongs to a sub device
|
||||
if entity_info.device_id:
|
||||
# Entity belongs to a sub device
|
||||
@@ -352,27 +350,12 @@ class EsphomeEntity(EsphomeBaseEntity, Generic[_InfoT, _StateT]):
|
||||
(DOMAIN, f"{device_info.mac_address}_{entity_info.device_id}")
|
||||
}
|
||||
)
|
||||
# Use the pre-computed device_id_to_name mapping for O(1) lookup
|
||||
device_name = entry_data.device_id_to_name.get(
|
||||
entity_info.device_id, device_info.name
|
||||
)
|
||||
else:
|
||||
# Entity belongs to the main device
|
||||
self._attr_device_info = DeviceInfo(
|
||||
connections={(dr.CONNECTION_NETWORK_MAC, device_info.mac_address)}
|
||||
)
|
||||
|
||||
if entity_info.name:
|
||||
self.entity_id = f"{domain}.{device_name}_{entity_info.name}"
|
||||
else:
|
||||
# https://github.com/home-assistant/core/issues/132532
|
||||
# If name is not set, ESPHome will use the sanitized friendly name
|
||||
# as the name, however we want to use the original object_id
|
||||
# as the entity_id before it is sanitized since the sanitizer
|
||||
# is not utf-8 aware. In this case, its always going to be
|
||||
# an empty string so we drop the object_id.
|
||||
self.entity_id = f"{domain}.{device_name}"
|
||||
|
||||
async def async_added_to_hass(self) -> None:
|
||||
"""Register callbacks."""
|
||||
entry_data = self._entry_data
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
"mqtt": ["esphome/discover/#"],
|
||||
"quality_scale": "platinum",
|
||||
"requirements": [
|
||||
"aioesphomeapi==42.7.0",
|
||||
"aioesphomeapi==42.8.0",
|
||||
"esphome-dashboard-api==1.3.0",
|
||||
"bleak-esphome==3.4.0"
|
||||
],
|
||||
|
||||
29
homeassistant/components/essent/__init__.py
Normal file
29
homeassistant/components/essent/__init__.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""The Essent integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from homeassistant.const import Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
|
||||
from .coordinator import EssentConfigEntry, EssentDataUpdateCoordinator
|
||||
|
||||
PLATFORMS: list[Platform] = [Platform.SENSOR]
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: EssentConfigEntry) -> bool:
|
||||
"""Set up Essent from a config entry."""
|
||||
coordinator = EssentDataUpdateCoordinator(hass, entry)
|
||||
await coordinator.async_config_entry_first_refresh()
|
||||
|
||||
# Start listener updates on the hour to advance cached tariffs
|
||||
coordinator.start_listener_schedule()
|
||||
|
||||
entry.runtime_data = coordinator
|
||||
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: EssentConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
47
homeassistant/components/essent/config_flow.py
Normal file
47
homeassistant/components/essent/config_flow.py
Normal file
@@ -0,0 +1,47 @@
|
||||
"""Config flow for Essent integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from essent_dynamic_pricing import (
|
||||
EssentClient,
|
||||
EssentConnectionError,
|
||||
EssentDataError,
|
||||
EssentResponseError,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class EssentConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Essent."""
|
||||
|
||||
VERSION = 1
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
client = EssentClient(async_get_clientsession(self.hass))
|
||||
|
||||
try:
|
||||
await client.async_get_prices()
|
||||
except (EssentConnectionError, EssentResponseError):
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except EssentDataError:
|
||||
return self.async_abort(reason="invalid_data")
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected error while validating the connection")
|
||||
return self.async_abort(reason="unknown")
|
||||
|
||||
if user_input is None:
|
||||
return self.async_show_form(step_id="user")
|
||||
|
||||
return self.async_create_entry(title="Essent", data={})
|
||||
29
homeassistant/components/essent/const.py
Normal file
29
homeassistant/components/essent/const.py
Normal file
@@ -0,0 +1,29 @@
|
||||
"""Constants for the Essent integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
from enum import StrEnum
|
||||
from typing import Final
|
||||
|
||||
DOMAIN: Final = "essent"
|
||||
UPDATE_INTERVAL: Final = timedelta(hours=12)
|
||||
ATTRIBUTION: Final = "Data provided by Essent"
|
||||
|
||||
|
||||
class EnergyType(StrEnum):
|
||||
"""Supported energy types for Essent pricing."""
|
||||
|
||||
ELECTRICITY = "electricity"
|
||||
GAS = "gas"
|
||||
|
||||
|
||||
class PriceGroup(StrEnum):
|
||||
"""Price group types as provided in tariff groups.
|
||||
|
||||
VAT is not emitted as a price group; use tariff.total_amount_vat for VAT.
|
||||
"""
|
||||
|
||||
MARKET_PRICE = "MARKET_PRICE"
|
||||
PURCHASING_FEE = "PURCHASING_FEE"
|
||||
TAX = "TAX"
|
||||
108
homeassistant/components/essent/coordinator.py
Normal file
108
homeassistant/components/essent/coordinator.py
Normal file
@@ -0,0 +1,108 @@
|
||||
"""DataUpdateCoordinator for Essent integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from datetime import datetime, timedelta
|
||||
import logging
|
||||
|
||||
from essent_dynamic_pricing import (
|
||||
EssentClient,
|
||||
EssentConnectionError,
|
||||
EssentDataError,
|
||||
EssentError,
|
||||
EssentPrices,
|
||||
EssentResponseError,
|
||||
)
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.event import async_track_point_in_utc_time
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import DOMAIN, UPDATE_INTERVAL
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
type EssentConfigEntry = ConfigEntry[EssentDataUpdateCoordinator]
|
||||
|
||||
|
||||
class EssentDataUpdateCoordinator(DataUpdateCoordinator[EssentPrices]):
|
||||
"""Class to manage fetching Essent data."""
|
||||
|
||||
config_entry: EssentConfigEntry
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config_entry: EssentConfigEntry) -> None:
|
||||
"""Initialize."""
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=DOMAIN,
|
||||
update_interval=UPDATE_INTERVAL,
|
||||
)
|
||||
self._client = EssentClient(async_get_clientsession(hass))
|
||||
self._unsub_listener: Callable[[], None] | None = None
|
||||
|
||||
def start_listener_schedule(self) -> None:
|
||||
"""Start listener tick schedule after first successful data fetch."""
|
||||
if self.config_entry.pref_disable_polling:
|
||||
_LOGGER.debug("Polling disabled by config entry, not starting listener")
|
||||
return
|
||||
if self._unsub_listener:
|
||||
return
|
||||
_LOGGER.info("Starting listener updates on the hour")
|
||||
self._schedule_listener_tick()
|
||||
|
||||
async def async_shutdown(self) -> None:
|
||||
"""Cancel any scheduled call, and ignore new runs."""
|
||||
await super().async_shutdown()
|
||||
if self._unsub_listener:
|
||||
self._unsub_listener()
|
||||
self._unsub_listener = None
|
||||
|
||||
def _schedule_listener_tick(self) -> None:
|
||||
"""Schedule listener updates on the hour to advance cached tariffs."""
|
||||
if self._unsub_listener:
|
||||
self._unsub_listener()
|
||||
|
||||
now = dt_util.utcnow()
|
||||
next_hour = now + timedelta(hours=1)
|
||||
next_run = datetime(
|
||||
next_hour.year,
|
||||
next_hour.month,
|
||||
next_hour.day,
|
||||
next_hour.hour,
|
||||
tzinfo=dt_util.UTC,
|
||||
)
|
||||
|
||||
_LOGGER.debug("Scheduling next listener tick for %s", next_run)
|
||||
|
||||
@callback
|
||||
def _handle(_: datetime) -> None:
|
||||
"""Handle the scheduled listener tick to update sensors."""
|
||||
self._unsub_listener = None
|
||||
_LOGGER.debug("Listener tick fired, updating sensors with cached data")
|
||||
self.async_update_listeners()
|
||||
self._schedule_listener_tick()
|
||||
|
||||
self._unsub_listener = async_track_point_in_utc_time(
|
||||
self.hass,
|
||||
_handle,
|
||||
next_run,
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> EssentPrices:
|
||||
"""Fetch data from API."""
|
||||
try:
|
||||
return await self._client.async_get_prices()
|
||||
except EssentConnectionError as err:
|
||||
raise UpdateFailed(f"Error communicating with API: {err}") from err
|
||||
except EssentResponseError as err:
|
||||
raise UpdateFailed(str(err)) from err
|
||||
except EssentDataError as err:
|
||||
_LOGGER.debug("Invalid data received: %s", err)
|
||||
raise UpdateFailed(str(err)) from err
|
||||
except EssentError as err:
|
||||
raise UpdateFailed("Unexpected Essent error") from err
|
||||
36
homeassistant/components/essent/entity.py
Normal file
36
homeassistant/components/essent/entity.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""Base entity for Essent integration."""
|
||||
|
||||
from essent_dynamic_pricing.models import EnergyData
|
||||
|
||||
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import CoordinatorEntity
|
||||
|
||||
from .const import ATTRIBUTION, DOMAIN, EnergyType
|
||||
from .coordinator import EssentDataUpdateCoordinator
|
||||
|
||||
|
||||
class EssentEntity(CoordinatorEntity[EssentDataUpdateCoordinator]):
|
||||
"""Base class for Essent entities."""
|
||||
|
||||
_attr_has_entity_name = True
|
||||
_attr_attribution = ATTRIBUTION
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: EssentDataUpdateCoordinator,
|
||||
energy_type: EnergyType,
|
||||
) -> None:
|
||||
"""Initialize the entity."""
|
||||
super().__init__(coordinator)
|
||||
self.energy_type = energy_type
|
||||
self._attr_device_info = DeviceInfo(
|
||||
entry_type=DeviceEntryType.SERVICE,
|
||||
identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
|
||||
name="Essent",
|
||||
manufacturer="Essent",
|
||||
)
|
||||
|
||||
@property
|
||||
def energy_data(self) -> EnergyData:
|
||||
"""Return the energy data for this entity."""
|
||||
return getattr(self.coordinator.data, self.energy_type.value)
|
||||
12
homeassistant/components/essent/manifest.json
Normal file
12
homeassistant/components/essent/manifest.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"domain": "essent",
|
||||
"name": "Essent",
|
||||
"codeowners": ["@jaapp"],
|
||||
"config_flow": true,
|
||||
"documentation": "https://www.home-assistant.io/integrations/essent",
|
||||
"integration_type": "service",
|
||||
"iot_class": "cloud_polling",
|
||||
"quality_scale": "silver",
|
||||
"requirements": ["essent-dynamic-pricing==0.2.7"],
|
||||
"single_config_entry": true
|
||||
}
|
||||
89
homeassistant/components/essent/quality_scale.yaml
Normal file
89
homeassistant/components/essent/quality_scale.yaml
Normal file
@@ -0,0 +1,89 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not provide additional actions or services.
|
||||
appropriate-polling: done
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow-test-coverage: done
|
||||
config-flow: done
|
||||
dependency-transparency: done
|
||||
docs-actions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not expose services or actions.
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: |
|
||||
Entities rely on DataUpdateCoordinator updates rather than event subscriptions.
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: |
|
||||
This integration does not expose user actions.
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters:
|
||||
status: exempt
|
||||
comment: |
|
||||
No options flow is provided.
|
||||
docs-installation-parameters: done
|
||||
entity-unavailable: done
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: done
|
||||
reauthentication-flow:
|
||||
status: exempt
|
||||
comment: |
|
||||
No authentication is required for this integration.
|
||||
test-coverage: done
|
||||
# Gold
|
||||
devices: done
|
||||
diagnostics: todo
|
||||
discovery-update-info: todo
|
||||
discovery: todo
|
||||
docs-data-update: todo
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device-less integration.
|
||||
entity-category: done
|
||||
entity-device-class: done
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations:
|
||||
status: exempt
|
||||
comment: |
|
||||
No custom icons are defined.
|
||||
reconfiguration-flow: todo
|
||||
repair-issues:
|
||||
status: exempt
|
||||
comment: |
|
||||
No known repair flows at this time.
|
||||
stale-devices:
|
||||
status: exempt
|
||||
comment: |
|
||||
Device-less integration.
|
||||
|
||||
# Platinum
|
||||
async-dependency: done
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
215
homeassistant/components/essent/sensor.py
Normal file
215
homeassistant/components/essent/sensor.py
Normal file
@@ -0,0 +1,215 @@
|
||||
"""Sensor platform for Essent integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from essent_dynamic_pricing.models import EnergyData, Tariff
|
||||
|
||||
from homeassistant.components.sensor import (
|
||||
EntityCategory,
|
||||
SensorEntity,
|
||||
SensorEntityDescription,
|
||||
SensorStateClass,
|
||||
)
|
||||
from homeassistant.const import CURRENCY_EURO
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import EnergyType, PriceGroup
|
||||
from .coordinator import EssentConfigEntry, EssentDataUpdateCoordinator
|
||||
from .entity import EssentEntity
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
PARALLEL_UPDATES = 1
|
||||
|
||||
|
||||
@dataclass(frozen=True, kw_only=True)
|
||||
class EssentSensorEntityDescription(SensorEntityDescription):
|
||||
"""Describe an Essent sensor."""
|
||||
|
||||
value_fn: Callable[[EnergyData], float | None]
|
||||
energy_types: tuple[EnergyType, ...] = (EnergyType.ELECTRICITY, EnergyType.GAS)
|
||||
|
||||
|
||||
def _get_all_tariffs(data: EnergyData) -> list[Tariff]:
|
||||
"""Return tariffs for both today and tomorrow."""
|
||||
return [*data.tariffs, *data.tariffs_tomorrow]
|
||||
|
||||
|
||||
def _get_current_tariff(data: EnergyData) -> Tariff | None:
|
||||
"""Return the currently active tariff."""
|
||||
now = dt_util.now()
|
||||
for tariff in _get_all_tariffs(data):
|
||||
if tariff.start is None or tariff.end is None:
|
||||
continue
|
||||
if tariff.start <= now < tariff.end:
|
||||
return tariff
|
||||
_LOGGER.debug("No current tariff found")
|
||||
return None
|
||||
|
||||
|
||||
def _get_next_tariff(data: EnergyData) -> Tariff | None:
|
||||
"""Return the next tariff."""
|
||||
now = dt_util.now()
|
||||
for tariff in _get_all_tariffs(data):
|
||||
if tariff.start is None:
|
||||
continue
|
||||
if tariff.start > now:
|
||||
return tariff
|
||||
_LOGGER.debug("No upcoming tariff found")
|
||||
return None
|
||||
|
||||
|
||||
def _get_current_tariff_groups(
|
||||
data: EnergyData,
|
||||
) -> tuple[Tariff | None, dict[str, Any]]:
|
||||
"""Return the current tariff and grouped amounts."""
|
||||
if (tariff := _get_current_tariff(data)) is None:
|
||||
return None, {}
|
||||
groups = {
|
||||
group["type"]: group.get("amount") for group in tariff.groups if "type" in group
|
||||
}
|
||||
return tariff, groups
|
||||
|
||||
|
||||
SENSORS: tuple[EssentSensorEntityDescription, ...] = (
|
||||
EssentSensorEntityDescription(
|
||||
key="current_price",
|
||||
translation_key="current_price",
|
||||
value_fn=lambda energy_data: (
|
||||
None
|
||||
if (tariff := _get_current_tariff(energy_data)) is None
|
||||
else tariff.total_amount
|
||||
),
|
||||
),
|
||||
EssentSensorEntityDescription(
|
||||
key="next_price",
|
||||
translation_key="next_price",
|
||||
value_fn=lambda energy_data: (
|
||||
None
|
||||
if (tariff := _get_next_tariff(energy_data)) is None
|
||||
else tariff.total_amount
|
||||
),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
EssentSensorEntityDescription(
|
||||
key="average_today",
|
||||
translation_key="average_today",
|
||||
value_fn=lambda energy_data: energy_data.avg_price,
|
||||
),
|
||||
EssentSensorEntityDescription(
|
||||
key="lowest_price_today",
|
||||
translation_key="lowest_price_today",
|
||||
value_fn=lambda energy_data: energy_data.min_price,
|
||||
energy_types=(EnergyType.ELECTRICITY,),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
EssentSensorEntityDescription(
|
||||
key="highest_price_today",
|
||||
translation_key="highest_price_today",
|
||||
value_fn=lambda energy_data: energy_data.max_price,
|
||||
energy_types=(EnergyType.ELECTRICITY,),
|
||||
entity_registry_enabled_default=False,
|
||||
),
|
||||
EssentSensorEntityDescription(
|
||||
key="current_price_ex_vat",
|
||||
translation_key="current_price_ex_vat",
|
||||
value_fn=lambda energy_data: (
|
||||
None
|
||||
if (tariff := _get_current_tariff(energy_data)) is None
|
||||
else tariff.total_amount_ex
|
||||
),
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
EssentSensorEntityDescription(
|
||||
key="current_price_vat",
|
||||
translation_key="current_price_vat",
|
||||
value_fn=lambda energy_data: (
|
||||
None
|
||||
if (tariff := _get_current_tariff(energy_data)) is None
|
||||
# VAT is exposed as tariff.total_amount_vat, not as a tariff group
|
||||
else tariff.total_amount_vat
|
||||
),
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
EssentSensorEntityDescription(
|
||||
key="current_price_market_price",
|
||||
translation_key="current_price_market_price",
|
||||
value_fn=lambda energy_data: _get_current_tariff_groups(energy_data)[1].get(
|
||||
PriceGroup.MARKET_PRICE
|
||||
),
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
EssentSensorEntityDescription(
|
||||
key="current_price_purchasing_fee",
|
||||
translation_key="current_price_purchasing_fee",
|
||||
value_fn=lambda energy_data: _get_current_tariff_groups(energy_data)[1].get(
|
||||
PriceGroup.PURCHASING_FEE
|
||||
),
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
EssentSensorEntityDescription(
|
||||
key="current_price_tax",
|
||||
translation_key="current_price_tax",
|
||||
value_fn=lambda energy_data: _get_current_tariff_groups(energy_data)[1].get(
|
||||
PriceGroup.TAX
|
||||
),
|
||||
entity_registry_enabled_default=False,
|
||||
entity_category=EntityCategory.DIAGNOSTIC,
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant,
|
||||
entry: EssentConfigEntry,
|
||||
async_add_entities: AddConfigEntryEntitiesCallback,
|
||||
) -> None:
|
||||
"""Set up Essent sensors."""
|
||||
coordinator = entry.runtime_data
|
||||
async_add_entities(
|
||||
EssentSensor(coordinator, energy_type, description)
|
||||
for description in SENSORS
|
||||
for energy_type in description.energy_types
|
||||
)
|
||||
|
||||
|
||||
class EssentSensor(EssentEntity, SensorEntity):
|
||||
"""Generic Essent sensor driven by entity descriptions."""
|
||||
|
||||
_attr_state_class = SensorStateClass.MEASUREMENT
|
||||
_attr_suggested_display_precision = 3
|
||||
|
||||
entity_description: EssentSensorEntityDescription
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
coordinator: EssentDataUpdateCoordinator,
|
||||
energy_type: EnergyType,
|
||||
description: EssentSensorEntityDescription,
|
||||
) -> None:
|
||||
"""Initialize the sensor."""
|
||||
super().__init__(coordinator, energy_type)
|
||||
self.entity_description = description
|
||||
self._attr_unique_id = f"{energy_type}-{description.key}"
|
||||
self._attr_translation_key = f"{energy_type}_{description.translation_key}"
|
||||
|
||||
@property
|
||||
def native_value(self) -> float | None:
|
||||
"""Return the current value."""
|
||||
return self.entity_description.value_fn(self.energy_data)
|
||||
|
||||
@property
|
||||
def native_unit_of_measurement(self) -> str:
|
||||
"""Return the unit of measurement."""
|
||||
return f"{CURRENCY_EURO}/{self.energy_data.unit}"
|
||||
73
homeassistant/components/essent/strings.json
Normal file
73
homeassistant/components/essent/strings.json
Normal file
@@ -0,0 +1,73 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
|
||||
"invalid_data": "Received invalid data from Essent",
|
||||
"single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"description": "Set up Essent dynamic energy price monitoring for the Netherlands. For customers with dynamic pricing contracts only."
|
||||
}
|
||||
}
|
||||
},
|
||||
"entity": {
|
||||
"sensor": {
|
||||
"electricity_average_today": {
|
||||
"name": "Average electricity price today"
|
||||
},
|
||||
"electricity_current_price": {
|
||||
"name": "Current electricity price"
|
||||
},
|
||||
"electricity_current_price_ex_vat": {
|
||||
"name": "Current electricity price excl. VAT"
|
||||
},
|
||||
"electricity_current_price_market_price": {
|
||||
"name": "Current electricity market price"
|
||||
},
|
||||
"electricity_current_price_purchasing_fee": {
|
||||
"name": "Current electricity purchasing fee"
|
||||
},
|
||||
"electricity_current_price_tax": {
|
||||
"name": "Current electricity tax"
|
||||
},
|
||||
"electricity_current_price_vat": {
|
||||
"name": "Current electricity VAT"
|
||||
},
|
||||
"electricity_highest_price_today": {
|
||||
"name": "Highest electricity price today"
|
||||
},
|
||||
"electricity_lowest_price_today": {
|
||||
"name": "Lowest electricity price today"
|
||||
},
|
||||
"electricity_next_price": {
|
||||
"name": "Next electricity price"
|
||||
},
|
||||
"gas_average_today": {
|
||||
"name": "Average gas price today"
|
||||
},
|
||||
"gas_current_price": {
|
||||
"name": "Current gas price"
|
||||
},
|
||||
"gas_current_price_ex_vat": {
|
||||
"name": "Current gas price excl. VAT"
|
||||
},
|
||||
"gas_current_price_market_price": {
|
||||
"name": "Current gas market price"
|
||||
},
|
||||
"gas_current_price_purchasing_fee": {
|
||||
"name": "Current gas purchasing fee"
|
||||
},
|
||||
"gas_current_price_tax": {
|
||||
"name": "Current gas tax"
|
||||
},
|
||||
"gas_current_price_vat": {
|
||||
"name": "Current gas VAT"
|
||||
},
|
||||
"gas_next_price": {
|
||||
"name": "Next gas price"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -47,5 +47,13 @@
|
||||
"turn_on": {
|
||||
"service": "mdi:fan"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"turned_off": {
|
||||
"trigger": "mdi:fan-off"
|
||||
},
|
||||
"turned_on": {
|
||||
"trigger": "mdi:fan"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description": "The behavior of the targeted fans to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"toggle": "[%key:common::device_automation::action_type::toggle%]",
|
||||
@@ -66,6 +70,13 @@
|
||||
"forward": "Forward",
|
||||
"reverse": "Reverse"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -152,5 +163,29 @@
|
||||
"name": "[%key:common::action::turn_on%]"
|
||||
}
|
||||
},
|
||||
"title": "Fan"
|
||||
"title": "Fan",
|
||||
"triggers": {
|
||||
"turned_off": {
|
||||
"description": "Triggers when a fan is turned off.",
|
||||
"description_configured": "[%key:component::fan::triggers::turned_off::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::fan::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::fan::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a fan is turned off"
|
||||
},
|
||||
"turned_on": {
|
||||
"description": "Triggers when a fan is turned on.",
|
||||
"description_configured": "[%key:component::fan::triggers::turned_on::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::fan::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::fan::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When a fan is turned on"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
17
homeassistant/components/fan/trigger.py
Normal file
17
homeassistant/components/fan/trigger.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Provides triggers for fans."""
|
||||
|
||||
from homeassistant.const import STATE_OFF, STATE_ON
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger
|
||||
|
||||
from . import DOMAIN
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"turned_off": make_entity_state_trigger(DOMAIN, STATE_OFF),
|
||||
"turned_on": make_entity_state_trigger(DOMAIN, STATE_ON),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for fans."""
|
||||
return TRIGGERS
|
||||
18
homeassistant/components/fan/triggers.yaml
Normal file
18
homeassistant/components/fan/triggers.yaml
Normal file
@@ -0,0 +1,18 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
entity:
|
||||
domain: fan
|
||||
fields:
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
translation_key: trigger_behavior
|
||||
|
||||
turned_on: *trigger_common
|
||||
turned_off: *trigger_common
|
||||
@@ -8,7 +8,7 @@
|
||||
"integration_type": "hub",
|
||||
"iot_class": "local_polling",
|
||||
"loggers": ["fritzconnection"],
|
||||
"requirements": ["fritzconnection[qr]==1.15.0", "xmltodict==0.13.0"],
|
||||
"requirements": ["fritzconnection[qr]==1.15.0", "xmltodict==1.0.2"],
|
||||
"ssdp": [
|
||||
{
|
||||
"st": "urn:schemas-upnp-org:device:fritzbox:1"
|
||||
|
||||
@@ -778,7 +778,7 @@ class ManifestJSONView(HomeAssistantView):
|
||||
{
|
||||
"type": "frontend/get_icons",
|
||||
vol.Required("category"): vol.In(
|
||||
{"entity", "entity_component", "services", "triggers", "conditions"}
|
||||
{"conditions", "entity", "entity_component", "services", "triggers"}
|
||||
),
|
||||
vol.Optional("integration"): vol.All(cv.ensure_list, [str]),
|
||||
}
|
||||
|
||||
@@ -19,6 +19,9 @@
|
||||
],
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"preview_features": {
|
||||
"winter_mode": {}
|
||||
},
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20251105.1"]
|
||||
}
|
||||
|
||||
@@ -1,4 +1,12 @@
|
||||
{
|
||||
"preview_features": {
|
||||
"winter_mode": {
|
||||
"description": "Adds falling snowflakes on your screen. Get your home ready for winter! ❄️",
|
||||
"disable_confirmation": "Snowflakes will no longer fall on your screen. You can re-enable this at any time in labs settings.",
|
||||
"enable_confirmation": "Snowflakes will start falling on your screen. You can turn this off at any time in labs settings.",
|
||||
"name": "Winter mode"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"mode": {
|
||||
"options": {
|
||||
|
||||
@@ -7,4 +7,6 @@ DEBUG_UI_URL_MESSAGE = "Url and debug_ui cannot be set at the same time."
|
||||
HA_MANAGED_API_PORT = 11984
|
||||
HA_MANAGED_URL = f"http://localhost:{HA_MANAGED_API_PORT}/"
|
||||
HA_MANAGED_UNIX_SOCKET = "/run/go2rtc.sock"
|
||||
# When changing this version, also update the corresponding SHA hash (_GO2RTC_SHA)
|
||||
# in script/hassfest/docker.py.
|
||||
RECOMMENDED_VERSION = "1.9.12"
|
||||
|
||||
64
homeassistant/components/google_air_quality/__init__.py
Normal file
64
homeassistant/components/google_air_quality/__init__.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""The Google Air Quality integration."""
|
||||
|
||||
import asyncio
|
||||
|
||||
from google_air_quality_api.api import GoogleAirQualityApi
|
||||
from google_air_quality_api.auth import Auth
|
||||
|
||||
from homeassistant.const import CONF_API_KEY, Platform
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import CONF_REFERRER
|
||||
from .coordinator import (
|
||||
GoogleAirQualityConfigEntry,
|
||||
GoogleAirQualityRuntimeData,
|
||||
GoogleAirQualityUpdateCoordinator,
|
||||
)
|
||||
|
||||
PLATFORMS: list[Platform] = [
|
||||
Platform.SENSOR,
|
||||
]
|
||||
|
||||
|
||||
async def async_setup_entry(
|
||||
hass: HomeAssistant, entry: GoogleAirQualityConfigEntry
|
||||
) -> bool:
|
||||
"""Set up Google Air Quality from a config entry."""
|
||||
session = async_get_clientsession(hass)
|
||||
api_key = entry.data[CONF_API_KEY]
|
||||
referrer = entry.data.get(CONF_REFERRER)
|
||||
auth = Auth(session, api_key, referrer=referrer)
|
||||
client = GoogleAirQualityApi(auth)
|
||||
coordinators: dict[str, GoogleAirQualityUpdateCoordinator] = {}
|
||||
for subentry_id in entry.subentries:
|
||||
coordinators[subentry_id] = GoogleAirQualityUpdateCoordinator(
|
||||
hass, entry, subentry_id, client
|
||||
)
|
||||
await asyncio.gather(
|
||||
*(
|
||||
coordinator.async_config_entry_first_refresh()
|
||||
for coordinator in coordinators.values()
|
||||
)
|
||||
)
|
||||
entry.runtime_data = GoogleAirQualityRuntimeData(
|
||||
api=client,
|
||||
subentries_runtime_data=coordinators,
|
||||
)
|
||||
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
|
||||
entry.async_on_unload(entry.add_update_listener(async_update_options))
|
||||
return True
|
||||
|
||||
|
||||
async def async_unload_entry(
|
||||
hass: HomeAssistant, entry: GoogleAirQualityConfigEntry
|
||||
) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
|
||||
|
||||
|
||||
async def async_update_options(
|
||||
hass: HomeAssistant, entry: GoogleAirQualityConfigEntry
|
||||
) -> None:
|
||||
"""Update options."""
|
||||
await hass.config_entries.async_reload(entry.entry_id)
|
||||
198
homeassistant/components/google_air_quality/config_flow.py
Normal file
198
homeassistant/components/google_air_quality/config_flow.py
Normal file
@@ -0,0 +1,198 @@
|
||||
"""Config flow for the Google Air Quality integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from google_air_quality_api.api import GoogleAirQualityApi
|
||||
from google_air_quality_api.auth import Auth
|
||||
from google_air_quality_api.exceptions import GoogleAirQualityApiError
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import (
|
||||
ConfigEntry,
|
||||
ConfigEntryState,
|
||||
ConfigFlow,
|
||||
ConfigFlowResult,
|
||||
ConfigSubentryFlow,
|
||||
SubentryFlowResult,
|
||||
)
|
||||
from homeassistant.const import (
|
||||
CONF_API_KEY,
|
||||
CONF_LATITUDE,
|
||||
CONF_LOCATION,
|
||||
CONF_LONGITUDE,
|
||||
CONF_NAME,
|
||||
)
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.data_entry_flow import section
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import LocationSelector, LocationSelectorConfig
|
||||
|
||||
from .const import CONF_REFERRER, DOMAIN, SECTION_API_KEY_OPTIONS
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_API_KEY): str,
|
||||
vol.Optional(SECTION_API_KEY_OPTIONS): section(
|
||||
vol.Schema({vol.Optional(CONF_REFERRER): str}), {"collapsed": True}
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
async def _validate_input(
    user_input: dict[str, Any],
    api: GoogleAirQualityApi,
    errors: dict[str, str],
    description_placeholders: dict[str, str],
) -> bool:
    """Validate the user input by performing a live API request.

    Returns True when the request succeeds. On failure, fills ``errors``
    (and ``description_placeholders`` for connection errors) and returns
    False.
    """
    location = user_input[CONF_LOCATION]
    try:
        await api.async_air_quality(
            lat=location[CONF_LATITUDE],
            long=location[CONF_LONGITUDE],
        )
    except GoogleAirQualityApiError as err:
        errors["base"] = "cannot_connect"
        description_placeholders["error_message"] = str(err)
        return False
    except Exception:
        # Catch-all so unexpected library errors surface as a form error
        # instead of crashing the flow.
        _LOGGER.exception("Unexpected exception")
        errors["base"] = "unknown"
        return False
    return True
|
||||
|
||||
|
||||
def _get_location_schema(hass: HomeAssistant) -> vol.Schema:
    """Return the schema for a location with default values from the hass config."""
    # Pre-fill the map selector with the Home Assistant home coordinates.
    default_location = {
        CONF_LATITUDE: hass.config.latitude,
        CONF_LONGITUDE: hass.config.longitude,
    }
    fields = {
        vol.Required(CONF_NAME, default=hass.config.location_name): str,
        vol.Required(CONF_LOCATION, default=default_location): LocationSelector(
            LocationSelectorConfig(radius=False)
        ),
    }
    return vol.Schema(fields)
|
||||
|
||||
|
||||
def _is_location_already_configured(
    hass: HomeAssistant, new_data: dict[str, float], epsilon: float = 1e-4
) -> bool:
    """Check if the location is already configured."""
    new_lat = new_data[CONF_LATITUDE]
    new_long = new_data[CONF_LONGITUDE]
    # A more accurate way is to use the haversine formula, but for simplicity
    # we use a simple distance check. The epsilon value is small anyway.
    # This is mostly to capture cases where the user has slightly moved the
    # location pin.
    return any(
        abs(subentry.data[CONF_LATITUDE] - new_lat) <= epsilon
        and abs(subentry.data[CONF_LONGITUDE] - new_long) <= epsilon
        for entry in hass.config_entries.async_entries(DOMAIN)
        for subentry in entry.subentries.values()
    )
|
||||
|
||||
|
||||
class GoogleAirQualityConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Google AirQuality."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step.

        Asks for an API key (with optional referrer restriction) and a first
        location, validates the key with a live API request, and creates the
        entry with the location stored as a subentry.
        """
        errors: dict[str, str] = {}
        description_placeholders: dict[str, str] = {
            "api_key_url": "https://developers.google.com/maps/documentation/air-quality/get-api-key",
            "restricting_api_keys_url": "https://developers.google.com/maps/api-security-best-practices#restricting-api-keys",
        }
        if user_input is not None:
            api_key = user_input[CONF_API_KEY]
            referrer = user_input.get(SECTION_API_KEY_OPTIONS, {}).get(CONF_REFERRER)
            # One entry per API key; additional locations become subentries.
            self._async_abort_entries_match({CONF_API_KEY: api_key})
            if _is_location_already_configured(self.hass, user_input[CONF_LOCATION]):
                return self.async_abort(reason="already_configured")
            session = async_get_clientsession(self.hass)
            auth = Auth(session, api_key, referrer=referrer)
            api = GoogleAirQualityApi(auth)
            if await _validate_input(user_input, api, errors, description_placeholders):
                return self.async_create_entry(
                    title="Google Air Quality",
                    data={
                        CONF_API_KEY: api_key,
                        CONF_REFERRER: referrer,
                    },
                    subentries=[
                        {
                            "subentry_type": "location",
                            "data": user_input[CONF_LOCATION],
                            "title": user_input[CONF_NAME],
                            "unique_id": None,
                        },
                    ],
                )
        else:
            user_input = {}
        # The user form combines the API-key schema with the location schema.
        schema = STEP_USER_DATA_SCHEMA.schema.copy()
        schema.update(_get_location_schema(self.hass).schema)
        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(
                vol.Schema(schema), user_input
            ),
            errors=errors,
            description_placeholders=description_placeholders,
        )

    @classmethod
    @callback
    def async_get_supported_subentry_types(
        cls, config_entry: ConfigEntry
    ) -> dict[str, type[ConfigSubentryFlow]]:
        """Return subentries supported by this integration."""
        return {"location": LocationSubentryFlowHandler}
|
||||
|
||||
|
||||
class LocationSubentryFlowHandler(ConfigSubentryFlow):
    """Handle a subentry flow for location."""

    async def async_step_location(
        self,
        user_input: dict[str, Any] | None = None,
    ) -> SubentryFlowResult:
        """Handle the location step."""
        entry = self._get_entry()
        # Validation uses the live API client from runtime_data, which only
        # exists while the parent entry is loaded.
        if entry.state != ConfigEntryState.LOADED:
            return self.async_abort(reason="entry_not_loaded")

        errors: dict[str, str] = {}
        description_placeholders: dict[str, str] = {}
        if user_input is not None:
            if _is_location_already_configured(self.hass, user_input[CONF_LOCATION]):
                return self.async_abort(reason="already_configured")
            api: GoogleAirQualityApi = entry.runtime_data.api
            valid = await _validate_input(
                user_input, api, errors, description_placeholders
            )
            if valid:
                return self.async_create_entry(
                    title=user_input[CONF_NAME],
                    data=user_input[CONF_LOCATION],
                )
        return self.async_show_form(
            step_id="location",
            data_schema=self.add_suggested_values_to_schema(
                _get_location_schema(self.hass), user_input or {}
            ),
            errors=errors,
            description_placeholders=description_placeholders,
        )

    # The "user" step is the entry point invoked for a new subentry; it is
    # identical to the location step.
    async_step_user = async_step_location
|
||||
7
homeassistant/components/google_air_quality/const.py
Normal file
7
homeassistant/components/google_air_quality/const.py
Normal file
@@ -0,0 +1,7 @@
|
||||
"""Constants for the Google Air Quality integration."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
DOMAIN = "google_air_quality"
|
||||
SECTION_API_KEY_OPTIONS: Final = "api_key_options"
|
||||
CONF_REFERRER: Final = "referrer"
|
||||
68
homeassistant/components/google_air_quality/coordinator.py
Normal file
68
homeassistant/components/google_air_quality/coordinator.py
Normal file
@@ -0,0 +1,68 @@
|
||||
"""Coordinator for fetching data from Google Air Quality API."""
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Final
|
||||
|
||||
from google_air_quality_api.api import GoogleAirQualityApi
|
||||
from google_air_quality_api.exceptions import GoogleAirQualityApiError
|
||||
from google_air_quality_api.model import AirQualityData
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Poll interval for air quality data.
UPDATE_INTERVAL: Final = timedelta(hours=1)

# Config entry carrying this integration's runtime data.
type GoogleAirQualityConfigEntry = ConfigEntry[GoogleAirQualityRuntimeData]
|
||||
|
||||
|
||||
class GoogleAirQualityUpdateCoordinator(DataUpdateCoordinator[AirQualityData]):
    """Coordinator for fetching Google AirQuality data."""

    config_entry: GoogleAirQualityConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: GoogleAirQualityConfigEntry,
        subentry_id: str,
        client: GoogleAirQualityApi,
    ) -> None:
        """Initialize DataUpdateCoordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=f"{DOMAIN}_{subentry_id}",
            update_interval=UPDATE_INTERVAL,
        )
        self.client = client
        # Each coordinator serves one location subentry; cache its coordinates.
        location = config_entry.subentries[subentry_id].data
        self.lat = location[CONF_LATITUDE]
        self.long = location[CONF_LONGITUDE]

    async def _async_update_data(self) -> AirQualityData:
        """Fetch air quality data for this coordinate."""
        try:
            data = await self.client.async_air_quality(self.lat, self.long)
        except GoogleAirQualityApiError as err:
            _LOGGER.debug("Cannot fetch air quality data: %s", str(err))
            raise UpdateFailed(
                translation_domain=DOMAIN,
                translation_key="unable_to_fetch",
            ) from err
        return data
|
||||
|
||||
|
||||
@dataclass
class GoogleAirQualityRuntimeData:
    """Runtime data for the Google Air Quality integration."""

    # Shared API client for the config entry (one per API key).
    api: GoogleAirQualityApi
    # Per-location coordinators; presumably keyed by subentry_id, matching
    # GoogleAirQualityUpdateCoordinator's constructor — confirm against the
    # integration's setup code.
    subentries_runtime_data: dict[str, GoogleAirQualityUpdateCoordinator]
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user