Mirror of https://github.com/home-assistant/core.git, synced 2025-11-25 18:48:05 +00:00.

Compare commits: adguard/ad ... entity_reg (211 commits)
SHA1:
7631c8fd45 521a6784b4 d2ba7e8e3e 405c2f96fd 90ef5b1c25 562f72f321 f5ee3bd872 8dd35cb129
6fa971d393 6deff1c78f f96996b27f eb9fc66ca9 43e4fe4526 252dbb706f d7ad0cba94 159a8d39d6
8f1abb6dbb 242c02890f eb793a3942 e65c47ba0f 24dba24571 4c04dc00dd 0c366506c5 a0323e80f5
e496fb2227 c2219aadb1 2cd0637324 293f8f7c87 1af569ae17 d4db5ec0cc 4be1fa9a3a 149c1e6772
e37e7574a4 37152a27ba 5025af8334 ec9fb9837a 30451e3aaa c66c3497c1 1063e71318 1a875b021a
15328a4aff 083cfb89af bd129c2085 f73bc9242b 4506be5065 80c611e562 b44aafc294 af1e3205b8
1360fe7f23 b5bb8583f8 9b62c212ce 8fa56ad92e f82f0a1862 878881b100 743583d9bd f537204d22
ec74be7922 3574f647d0 6c4296a0de e780e3db8c 4ed2efa4e8 abef6f7b3e 5556fb99e6 16669e39bd
ca088d81c3 12847fb0a4 8b758c46f4 f439471dc1 5ff3233b09 22daed083f 13384de464 f5e5183190
e18668b8f9 15647f2720 c961126ee5 5142c5f418 3d459704e1 5a8ddcd0b3 2667a40b92 08baa99691
d84cf26f40 ba5472da90 e20b88a54f ac69712a51 f0e75ba0ed e64598e7f5 e6f9a8e7d6 1e8b42f843
430eee0b28 b4799aa7ea ab45460069 c8fd6db3ff 0a9f200ca4 8591335660 c01089e994 79a7daf89d
d22867b852 ddb74c5af4 9aec7b12c2 bf42e3769a 43f40c6f0e 03ac634e6d a204e85d84 79c7ad7646
704d4c896d 5b6a4b0fea ef5573c693 45aecd525a ce1146492e 1ce890b105 3e7bef77e5 1222828852
1ef64582eb d363bd63eb 5916af1115 f8bf7ec1ff 41e42b9581 51f68f2776 773cb7424c eefab75ef0
81b4122b73 fc8f8b39b4 ec0918027e 8a54f8d4e2 5c27126b6d e069aff0e2 733526fae3 1ef001f8e9
7732377fde b7786e589b 4f60970a91 1c1286dd57 41c9f08f60 fc4bfab0f7 769a12f74e dabaa2bc5e
b674828a91 761da66658 c8aba62301 07ab2e6805 f62e0c8c08 6ca00f9dbb 0fba80e30f 7073c40385
8fb9d92daf 2d81665f99 b398935539 95f588aae1 ffe524d95a ee05adfca1 168c915b5f 6c80be52af
ead92cdf82 c0f0cfef59 cefc0ba96e ad091b1062 876bc6d8c4 9f206d4363 a2d11e6d98 3b38af3984
3875f91bb9 c813776b0c 3afb421cba c16633568b 87f8ff2bb4 b423303f1e f6ff222679 0152fa0c03
37ebbe83bc 63e036d39e f0cbf34a78 596bc89ee6 b8c877e1d2 197d9781cb f3f323637e 9748abc103
596f049971 dee80cb6f5 b4ab73468b a300199a97 09dd765583 0c8b765415 0824ec502f 9e0e353a5f
e934b006e2 05479bb8fd d07247566d 19e6097df6 2cff3cf29c 5cac9b8e5e c2a516ea32 192b38d3e2
bb018e3546 4919d73cc5 f3ddffb5ff 9bdfa77fa0 c65003009f 0f722109b7 f7d86dec3c 6b49c8a70c
ab9a8f3e53 4e12628266 e6d8d4de42 6620b90eb4 6fd3af8891 46979b8418 1718a11de2 2016b1d8c7
4b72e45fc2 ead5ce905b f233f2da3f
.github/workflows/builder.yml (vendored, 80 changed lines)

@@ -14,6 +14,7 @@ env:
PIP_TIMEOUT: 60
UV_HTTP_TIMEOUT: 60
UV_SYSTEM_PYTHON: "true"
BASE_IMAGE_VERSION: "2025.11.0"

jobs:
init:
@@ -21,18 +22,15 @@ jobs:
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
outputs:
architectures: ${{ steps.info.outputs.architectures }}
version: ${{ steps.version.outputs.version }}
channel: ${{ steps.version.outputs.channel }}
publish: ${{ steps.version.outputs.publish }}
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
with:
fetch-depth: 0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -79,7 +77,7 @@ jobs:
name: Build ${{ matrix.arch }} base core image
if: github.repository_owner == 'home-assistant'
needs: init
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
permissions:
contents: read
packages: write
@@ -87,7 +85,12 @@ jobs:
strategy:
fail-fast: false
matrix:
arch: ${{ fromJson(needs.init.outputs.architectures) }}
arch: ["amd64", "aarch64"]
include:
- arch: amd64
os: ubuntu-latest
- arch: aarch64
os: ubuntu-24.04-arm
steps:
- name: Checkout the repository
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0
@@ -116,7 +119,7 @@ jobs:

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
if: needs.init.outputs.channel == 'dev'
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}

@@ -184,16 +187,59 @@ jobs:
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

# home-assistant/builder doesn't support sha pinning
- name: Build base image
uses: home-assistant/builder@2025.09.0
- name: Install Cosign
uses: sigstore/cosign-installer@faadad0cce49287aee09b3a48701e75088a2c6ad # v4.0.0
with:
args: |
$BUILD_ARGS \
--${{ matrix.arch }} \
--cosign \
--target /data \
--generic ${{ needs.init.outputs.version }}
cosign-release: "v2.5.3"

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@b5ca514318bd6ebac0fb2aedd5d36ec1b5c232a2 # v3.10.0

- name: Build variables
id: vars
shell: bash
run: |
echo "base_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant-base:${{ env.BASE_IMAGE_VERSION }}" >> "$GITHUB_OUTPUT"
echo "cache_image=ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:latest" >> "$GITHUB_OUTPUT"
echo "created=$(date --rfc-3339=seconds --utc)" >> "$GITHUB_OUTPUT"

- name: Verify base image signature
run: |
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp "https://github.com/home-assistant/docker/.*" \
"${{ steps.vars.outputs.base_image }}"

- name: Verify cache image signature
id: cache
continue-on-error: true
run: |
cosign verify \
--certificate-oidc-issuer https://token.actions.githubusercontent.com \
--certificate-identity-regexp "https://github.com/home-assistant/core/.*" \
"${{ steps.vars.outputs.cache_image }}"

- name: Build base image
id: build
uses: docker/build-push-action@263435318d21b8e681c14492fe198d362a7d2c83 # v6.18.0
with:
context: .
file: ./Dockerfile
platforms: ${{ steps.vars.outputs.platform }}
push: true
cache-from: ${{ steps.cache.outcome == 'success' && steps.vars.outputs.cache_image || '' }}
build-args: |
BUILD_FROM=${{ steps.vars.outputs.base_image }}
tags: ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}
labels: |
io.hass.arch=${{ matrix.arch }}
io.hass.version=${{ needs.init.outputs.version }}
org.opencontainers.image.created=${{ steps.vars.outputs.created }}
org.opencontainers.image.version=${{ needs.init.outputs.version }}

- name: Sign image
run: |
cosign sign --yes "ghcr.io/home-assistant/${{ matrix.arch }}-homeassistant:${{ needs.init.outputs.version }}@${{ steps.build.outputs.digest }}"

build_machine:
name: Build ${{ matrix.machine }} machine core image
@@ -417,7 +463,7 @@ jobs:
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/ci.yaml (vendored, 2 changed lines)

@@ -257,7 +257,7 @@ jobs:
- &setup-python-default
name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: &actions-setup-python actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
.github/workflows/codeql.yml (vendored, 4 changed lines)

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

- name: Initialize CodeQL
uses: github/codeql-action/init@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/init@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
with:
languages: python

- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@e12f0178983d466f2f6028f5cc7a6d786fd97f4b # v4.31.4
uses: github/codeql-action/analyze@fdbfb4d2750291e159f0156def62b853c2798ca2 # v4.31.5
with:
category: "/language:python"

@@ -231,7 +231,7 @@ jobs:
- name: Detect duplicates using AI
id: ai_detection
if: steps.extract.outputs.should_continue == 'true' && steps.fetch_similar.outputs.has_similar == 'true'
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
with:
model: openai/gpt-4o
system-prompt: |

@@ -57,7 +57,7 @@ jobs:
- name: Detect language using AI
id: ai_language_detection
if: steps.detect_language.outputs.should_continue == 'true'
uses: actions/ai-inference@a1c11829223a786afe3b5663db904a3aa1eac3a2 # v2.0.1
uses: actions/ai-inference@5022b33bc1431add9b2831934daf8147a2ad9331 # v2.0.2
with:
model: openai/gpt-4o-mini
system-prompt: |
.github/workflows/translations.yml (vendored, 2 changed lines)

@@ -22,7 +22,7 @@ jobs:
uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6.0.0

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
.github/workflows/wheels.yml (vendored, 17 changed lines)

@@ -28,8 +28,6 @@ jobs:
name: Initialize wheels builder
if: github.repository_owner == 'home-assistant'
runs-on: ubuntu-latest
outputs:
architectures: ${{ steps.info.outputs.architectures }}
steps:
- &checkout
name: Checkout the repository
@@ -37,7 +35,7 @@

- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
uses: actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
@@ -50,10 +48,6 @@ jobs:
pip install "$(grep '^uv' < requirements.txt)"
uv pip install -r requirements.txt

- name: Get information
id: info
uses: home-assistant/actions/helpers/info@master

- name: Create requirements_diff file
run: |
if [[ ${{ github.event_name }} =~ (schedule|workflow_dispatch) ]]; then
@@ -114,9 +108,10 @@ jobs:
fail-fast: false
matrix: &matrix-build
abi: ["cp313", "cp314"]
arch: ${{ fromJson(needs.init.outputs.architectures) }}
arch: ["amd64", "aarch64"]
include:
- os: ubuntu-latest
- arch: amd64
os: ubuntu-latest
- arch: aarch64
os: ubuntu-24.04-arm
steps:
@@ -140,9 +135,8 @@ jobs:
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt

# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: &home-assistant-wheels home-assistant/wheels@2025.10.0
uses: &home-assistant-wheels home-assistant/wheels@6066c17a2a4aafcf7bdfeae01717f63adfcdba98 # 2025.11.0
with:
abi: ${{ matrix.abi }}
tag: musllinux_1_2
@@ -183,7 +177,6 @@ jobs:
sed -i "/uv/d" requirements.txt
sed -i "/uv/d" requirements_diff.txt

# home-assistant/wheels doesn't support sha pinning
- name: Build wheels
uses: *home-assistant-wheels
with:
@@ -94,7 +94,7 @@ repos:
pass_filenames: false
language: script
types: [text]
files: ^(script/hassfest/metadata\.py|homeassistant/const\.py$|pyproject\.toml|homeassistant/components/go2rtc/const\.py)$
files: ^(script/hassfest/(metadata|docker)\.py|homeassistant/const\.py$|pyproject\.toml)$
- id: hassfest-mypy-config
name: hassfest-mypy-config
entry: script/run-in-env.sh python3 -m script.hassfest -p mypy_config

@@ -120,7 +120,6 @@ homeassistant.components.blueprint.*
homeassistant.components.bluesound.*
homeassistant.components.bluetooth.*
homeassistant.components.bluetooth_adapters.*
homeassistant.components.bluetooth_tracker.*
homeassistant.components.bmw_connected_drive.*
homeassistant.components.bond.*
homeassistant.components.bosch_alarm.*
CODEOWNERS (generated, 8 changed lines)

@@ -183,8 +183,8 @@ build.json @home-assistant/supervisor
/homeassistant/components/automation/ @home-assistant/core
/tests/components/automation/ @home-assistant/core
/homeassistant/components/avea/ @pattyland
/homeassistant/components/awair/ @ahayworth @danielsjf
/tests/components/awair/ @ahayworth @danielsjf
/homeassistant/components/awair/ @ahayworth @ricohageman
/tests/components/awair/ @ahayworth @ricohageman
/homeassistant/components/aws_s3/ @tomasbedrich
/tests/components/aws_s3/ @tomasbedrich
/homeassistant/components/axis/ @Kane610
@@ -391,6 +391,8 @@ build.json @home-assistant/supervisor
/tests/components/dsmr/ @Robbie1221
/homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna
/homeassistant/components/duckdns/ @tr4nt0r
/tests/components/duckdns/ @tr4nt0r
/homeassistant/components/duke_energy/ @hunterjm
/tests/components/duke_energy/ @hunterjm
/homeassistant/components/duotecno/ @cereal2nd
@@ -470,6 +472,8 @@ build.json @home-assistant/supervisor
/tests/components/escea/ @lazdavila
/homeassistant/components/esphome/ @jesserockz @kbx81 @bdraco
/tests/components/esphome/ @jesserockz @kbx81 @bdraco
/homeassistant/components/essent/ @jaapp
/tests/components/essent/ @jaapp
/homeassistant/components/eufylife_ble/ @bdr99
/tests/components/eufylife_ble/ @bdr99
/homeassistant/components/event/ @home-assistant/core
Dockerfile (generated, 33 changed lines)

@@ -4,32 +4,33 @@
ARG BUILD_FROM
FROM ${BUILD_FROM}

LABEL \
io.hass.type="core" \
org.opencontainers.image.authors="The Home Assistant Authors" \
org.opencontainers.image.description="Open-source home automation platform running on Python 3" \
org.opencontainers.image.documentation="https://www.home-assistant.io/docs/" \
org.opencontainers.image.licenses="Apache-2.0" \
org.opencontainers.image.source="https://github.com/home-assistant/core" \
org.opencontainers.image.title="Home Assistant" \
org.opencontainers.image.url="https://www.home-assistant.io/"

# Synchronize with homeassistant/core.py:async_stop
ENV \
S6_SERVICES_GRACETIME=240000 \
UV_SYSTEM_PYTHON=true \
UV_NO_CACHE=true

ARG QEMU_CPU

# Home Assistant S6-Overlay
COPY rootfs /

# Needs to be redefined inside the FROM statement to be set for RUN commands
ARG BUILD_ARCH
# Get go2rtc binary
RUN \
case "${BUILD_ARCH}" in \
"aarch64") go2rtc_suffix='arm64' ;; \
*) go2rtc_suffix=${BUILD_ARCH} ;; \
esac \
&& curl -L https://github.com/AlexxIT/go2rtc/releases/download/v1.9.12/go2rtc_linux_${go2rtc_suffix} --output /bin/go2rtc \
&& chmod +x /bin/go2rtc \
# Verify go2rtc can be executed
&& go2rtc --version
# Add go2rtc binary
COPY --from=ghcr.io/alexxit/go2rtc@sha256:baef0aa19d759fcfd31607b34ce8eaf039d496282bba57731e6ae326896d7640 /usr/local/bin/go2rtc /bin/go2rtc

# Install uv
RUN pip3 install uv==0.9.6
RUN \
# Verify go2rtc can be executed
go2rtc --version \
# Install uv
&& pip3 install uv==0.9.6

WORKDIR /usr/src
build.yaml (deleted, 16 lines)

@@ -1,16 +0,0 @@
image: ghcr.io/home-assistant/{arch}-homeassistant
build_from:
aarch64: ghcr.io/home-assistant/aarch64-homeassistant-base:2025.11.0
amd64: ghcr.io/home-assistant/amd64-homeassistant-base:2025.11.0
cosign:
base_identity: https://github.com/home-assistant/docker/.*
identity: https://github.com/home-assistant/core/.*
labels:
io.hass.type: core
org.opencontainers.image.title: Home Assistant
org.opencontainers.image.description: Open-source home automation platform running on Python 3
org.opencontainers.image.source: https://github.com/home-assistant/core
org.opencontainers.image.authors: The Home Assistant Authors
org.opencontainers.image.url: https://www.home-assistant.io/
org.opencontainers.image.documentation: https://www.home-assistant.io/docs/
org.opencontainers.image.licenses: Apache-2.0
@@ -1,5 +1,5 @@
{
"domain": "raspberry_pi",
"name": "Raspberry Pi",
"integrations": ["raspberry_pi", "rpi_camera", "rpi_power", "remote_rpi_gpio"]
"integrations": ["raspberry_pi", "rpi_power", "remote_rpi_gpio"]
}
@@ -75,9 +75,19 @@ class AirobotClimate(AirobotEntity, ClimateEntity):

@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
"""Return the current temperature.

If floor temperature is available, thermostat is set up for floor heating.
"""
if self._status.temp_floor is not None:
return self._status.temp_floor
return self._status.temp_air

@property
def current_humidity(self) -> float | None:
"""Return the current humidity."""
return self._status.hum_air

@property
def target_temperature(self) -> float | None:
"""Return the target temperature."""
@@ -126,6 +136,13 @@ class AirobotClimate(AirobotEntity, ClimateEntity):

await self.coordinator.async_request_refresh()

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set HVAC mode.

This thermostat only supports HEAT mode. The climate platform validates
that only supported modes are passed, so this method is a no-op.
"""

async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
try:
@@ -59,7 +59,9 @@ rules:
exception-translations: done
icon-translations: todo
reconfiguration-flow: todo
repair-issues: todo
repair-issues:
status: exempt
comment: This integration doesn't have any cases where raising an issue is needed.
stale-devices:
status: exempt
comment: Single device integration, no stale device handling needed.
@@ -36,5 +36,28 @@
|
||||
"alarm_trigger": {
|
||||
"service": "mdi:bell-ring"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"armed": {
|
||||
"trigger": "mdi:shield"
|
||||
},
|
||||
"armed_away": {
|
||||
"trigger": "mdi:shield-lock"
|
||||
},
|
||||
"armed_home": {
|
||||
"trigger": "mdi:shield-home"
|
||||
},
|
||||
"armed_night": {
|
||||
"trigger": "mdi:shield-moon"
|
||||
},
|
||||
"armed_vacation": {
|
||||
"trigger": "mdi:shield-airplane"
|
||||
},
|
||||
"disarmed": {
|
||||
"trigger": "mdi:shield-off"
|
||||
},
|
||||
"triggered": {
|
||||
"trigger": "mdi:bell-ring"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description": "The behavior of the targeted alarms to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"arm_away": "Arm {entity_name} away",
|
||||
@@ -71,6 +75,15 @@
|
||||
"message": "Arming requires a code but none was given for {entity_id}."
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"alarm_arm_away": {
|
||||
"description": "Arms the alarm in the away mode.",
|
||||
@@ -143,5 +156,84 @@
|
||||
"name": "Trigger"
|
||||
}
|
||||
},
|
||||
"title": "Alarm control panel"
|
||||
"title": "Alarm control panel",
|
||||
"triggers": {
|
||||
"armed": {
|
||||
"description": "Triggers when an alarm is armed.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is armed"
|
||||
},
|
||||
"armed_away": {
|
||||
"description": "Triggers when an alarm is armed away.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_away::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is armed away"
|
||||
},
|
||||
"armed_home": {
|
||||
"description": "Triggers when an alarm is armed home.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_home::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is armed home"
|
||||
},
|
||||
"armed_night": {
|
||||
"description": "Triggers when an alarm is armed night.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_night::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is armed night"
|
||||
},
|
||||
"armed_vacation": {
|
||||
"description": "Triggers when an alarm is armed vacation.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::armed_vacation::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is armed vacation"
|
||||
},
|
||||
"disarmed": {
|
||||
"description": "Triggers when an alarm is disarmed.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::disarmed::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is disarmed"
|
||||
},
|
||||
"triggered": {
|
||||
"description": "Triggers when an alarm is triggered.",
|
||||
"description_configured": "[%key:component::alarm_control_panel::triggers::triggered::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::alarm_control_panel::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::alarm_control_panel::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an alarm is triggered"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
homeassistant/components/alarm_control_panel/trigger.py (new file, 99 lines)
@@ -0,0 +1,99 @@
|
||||
"""Provides triggers for alarm control panels."""
|
||||
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity import get_supported_features
|
||||
from homeassistant.helpers.trigger import (
|
||||
EntityStateTriggerBase,
|
||||
Trigger,
|
||||
make_conditional_entity_state_trigger,
|
||||
make_entity_state_trigger,
|
||||
)
|
||||
|
||||
from .const import DOMAIN, AlarmControlPanelEntityFeature, AlarmControlPanelState
|
||||
|
||||
|
||||
def supports_feature(hass: HomeAssistant, entity_id: str, features: int) -> bool:
|
||||
"""Get the device class of an entity or UNDEFINED if not found."""
|
||||
try:
|
||||
return bool(get_supported_features(hass, entity_id) & features)
|
||||
except HomeAssistantError:
|
||||
return False
|
||||
|
||||
|
||||
class EntityStateTriggerRequiredFeatures(EntityStateTriggerBase):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_required_features: int
|
||||
|
||||
def entity_filter(self, entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
entities = super().entity_filter(entities)
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if supports_feature(self._hass, entity_id, self._required_features)
|
||||
}
|
||||
|
||||
|
||||
def make_entity_state_trigger_required_features(
|
||||
domain: str, to_state: str, required_features: int
|
||||
) -> type[EntityStateTriggerBase]:
|
||||
"""Create an entity state trigger class."""
|
||||
|
||||
class CustomTrigger(EntityStateTriggerRequiredFeatures):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_domain = domain
|
||||
_to_state = to_state
|
||||
_required_features = required_features
|
||||
|
||||
return CustomTrigger
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"armed": make_conditional_entity_state_trigger(
|
||||
DOMAIN,
|
||||
from_states={
|
||||
AlarmControlPanelState.ARMING,
|
||||
AlarmControlPanelState.DISARMED,
|
||||
AlarmControlPanelState.DISARMING,
|
||||
AlarmControlPanelState.PENDING,
|
||||
AlarmControlPanelState.TRIGGERED,
|
||||
},
|
||||
to_states={
|
||||
AlarmControlPanelState.ARMED_AWAY,
|
||||
AlarmControlPanelState.ARMED_CUSTOM_BYPASS,
|
||||
AlarmControlPanelState.ARMED_HOME,
|
||||
AlarmControlPanelState.ARMED_NIGHT,
|
||||
AlarmControlPanelState.ARMED_VACATION,
|
||||
},
|
||||
),
|
||||
"armed_away": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_AWAY,
|
||||
AlarmControlPanelEntityFeature.ARM_AWAY,
|
||||
),
|
||||
"armed_home": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_HOME,
|
||||
AlarmControlPanelEntityFeature.ARM_HOME,
|
||||
),
|
||||
"armed_night": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_NIGHT,
|
||||
AlarmControlPanelEntityFeature.ARM_NIGHT,
|
||||
),
|
||||
"armed_vacation": make_entity_state_trigger_required_features(
|
||||
DOMAIN,
|
||||
AlarmControlPanelState.ARMED_VACATION,
|
||||
AlarmControlPanelEntityFeature.ARM_VACATION,
|
||||
),
|
||||
"disarmed": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.DISARMED),
|
||||
"triggered": make_entity_state_trigger(DOMAIN, AlarmControlPanelState.TRIGGERED),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for alarm control panels."""
|
||||
return TRIGGERS
|
||||
homeassistant/components/alarm_control_panel/triggers.yaml (new file, 53 lines)
@@ -0,0 +1,53 @@
|
||||
.trigger_common: &trigger_common
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
fields: &trigger_common_fields
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
translation_key: trigger_behavior
|
||||
|
||||
armed: *trigger_common
|
||||
|
||||
armed_away:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_AWAY
|
||||
|
||||
armed_home:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_HOME
|
||||
|
||||
armed_night:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_NIGHT
|
||||
|
||||
armed_vacation:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: alarm_control_panel
|
||||
supported_features:
|
||||
- alarm_control_panel.AlarmControlPanelEntityFeature.ARM_VACATION
|
||||
|
||||
disarmed: *trigger_common
|
||||
|
||||
triggered: *trigger_common
|
||||
@@ -8,5 +8,5 @@
"iot_class": "cloud_polling",
"loggers": ["aioamazondevices"],
"quality_scale": "platinum",
"requirements": ["aioamazondevices==9.0.2"]
"requirements": ["aioamazondevices==9.0.3"]
}
@@ -17,13 +17,7 @@ from homeassistant.helpers import (
|
||||
)
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
|
||||
from .const import (
|
||||
CONF_CHAT_MODEL,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
)
|
||||
from .const import CONF_CHAT_MODEL, DEFAULT, DEFAULT_CONVERSATION_NAME, DOMAIN, LOGGER
|
||||
|
||||
PLATFORMS = (Platform.AI_TASK, Platform.CONVERSATION)
|
||||
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
|
||||
@@ -46,9 +40,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
|
||||
# Use model from first conversation subentry for validation
|
||||
subentries = list(entry.subentries.values())
|
||||
if subentries:
|
||||
model_id = subentries[0].data.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
model_id = subentries[0].data.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
|
||||
else:
|
||||
model_id = RECOMMENDED_CHAT_MODEL
|
||||
model_id = DEFAULT[CONF_CHAT_MODEL]
|
||||
model = await client.models.retrieve(model_id=model_id, timeout=10.0)
|
||||
LOGGER.debug("Anthropic model: %s", model.display_name)
|
||||
except anthropic.AuthenticationError as err:
|
||||
|
||||
@@ -6,7 +6,7 @@ from functools import partial
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
from typing import Any
|
||||
from typing import Any, cast
|
||||
|
||||
import anthropic
|
||||
import voluptuous as vol
|
||||
@@ -54,17 +54,11 @@ from .const import (
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DEFAULT,
|
||||
DEFAULT_AI_TASK_NAME,
|
||||
DEFAULT_CONVERSATION_NAME,
|
||||
DOMAIN,
|
||||
NON_THINKING_MODELS,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_THINKING_BUDGET,
|
||||
RECOMMENDED_WEB_SEARCH,
|
||||
RECOMMENDED_WEB_SEARCH_MAX_USES,
|
||||
RECOMMENDED_WEB_SEARCH_USER_LOCATION,
|
||||
WEB_SEARCH_UNSUPPORTED_MODELS,
|
||||
)
|
||||
|
||||
@@ -76,13 +70,13 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
}
|
||||
)
|
||||
|
||||
RECOMMENDED_CONVERSATION_OPTIONS = {
|
||||
DEFAULT_CONVERSATION_OPTIONS = {
|
||||
CONF_RECOMMENDED: True,
|
||||
CONF_LLM_HASS_API: [llm.LLM_API_ASSIST],
|
||||
CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT,
|
||||
}
|
||||
|
||||
RECOMMENDED_AI_TASK_OPTIONS = {
|
||||
DEFAULT_AI_TASK_OPTIONS = {
|
||||
CONF_RECOMMENDED: True,
|
||||
}
|
||||
|
||||
@@ -136,13 +130,13 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
subentries=[
|
||||
{
|
||||
"subentry_type": "conversation",
|
||||
"data": RECOMMENDED_CONVERSATION_OPTIONS,
|
||||
"data": DEFAULT_CONVERSATION_OPTIONS,
|
||||
"title": DEFAULT_CONVERSATION_NAME,
|
||||
"unique_id": None,
|
||||
},
|
||||
{
|
||||
"subentry_type": "ai_task_data",
|
||||
"data": RECOMMENDED_AI_TASK_OPTIONS,
|
||||
"data": DEFAULT_AI_TASK_OPTIONS,
|
||||
"title": DEFAULT_AI_TASK_NAME,
|
||||
"unique_id": None,
|
||||
},
|
||||
@@ -180,9 +174,9 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
) -> SubentryFlowResult:
|
||||
"""Add a subentry."""
|
||||
if self._subentry_type == "ai_task_data":
|
||||
self.options = RECOMMENDED_AI_TASK_OPTIONS.copy()
|
||||
self.options = DEFAULT_AI_TASK_OPTIONS.copy()
|
||||
else:
|
||||
self.options = RECOMMENDED_CONVERSATION_OPTIONS.copy()
|
||||
self.options = DEFAULT_CONVERSATION_OPTIONS.copy()
|
||||
return await self.async_step_init()
|
||||
|
||||
async def async_step_reconfigure(
|
||||
@@ -283,7 +277,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
step_schema: VolDictType = {
|
||||
vol.Optional(
|
||||
CONF_CHAT_MODEL,
|
||||
default=RECOMMENDED_CHAT_MODEL,
|
||||
default=DEFAULT[CONF_CHAT_MODEL],
|
||||
): SelectSelector(
|
||||
SelectSelectorConfig(
|
||||
options=await self._get_model_list(), custom_value=True
|
||||
@@ -291,11 +285,11 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
),
|
||||
vol.Optional(
|
||||
CONF_MAX_TOKENS,
|
||||
default=RECOMMENDED_MAX_TOKENS,
|
||||
default=DEFAULT[CONF_MAX_TOKENS],
|
||||
): int,
|
||||
vol.Optional(
|
||||
CONF_TEMPERATURE,
|
||||
default=RECOMMENDED_TEMPERATURE,
|
||||
default=DEFAULT[CONF_TEMPERATURE],
|
||||
): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)),
|
||||
}
|
||||
|
||||
@@ -325,12 +319,14 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
|
||||
if not model.startswith(tuple(NON_THINKING_MODELS)):
|
||||
step_schema[
|
||||
vol.Optional(CONF_THINKING_BUDGET, default=RECOMMENDED_THINKING_BUDGET)
|
||||
vol.Optional(
|
||||
CONF_THINKING_BUDGET, default=DEFAULT[CONF_THINKING_BUDGET]
|
||||
)
|
||||
] = vol.All(
|
||||
NumberSelector(
|
||||
NumberSelectorConfig(
|
||||
min=0,
|
||||
max=self.options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
max=self.options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
|
||||
)
|
||||
),
|
||||
vol.Coerce(int),
|
||||
@@ -343,15 +339,15 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
{
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH,
|
||||
default=RECOMMENDED_WEB_SEARCH,
|
||||
default=DEFAULT[CONF_WEB_SEARCH],
|
||||
): bool,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_MAX_USES,
|
||||
default=RECOMMENDED_WEB_SEARCH_MAX_USES,
|
||||
default=DEFAULT[CONF_WEB_SEARCH_MAX_USES],
|
||||
): int,
|
||||
vol.Optional(
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
default=RECOMMENDED_WEB_SEARCH_USER_LOCATION,
|
||||
default=DEFAULT[CONF_WEB_SEARCH_USER_LOCATION],
|
||||
): bool,
|
||||
}
|
||||
)
|
||||
@@ -369,9 +365,10 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
user_input = {}
|
||||
|
||||
if user_input is not None:
|
||||
if user_input.get(CONF_WEB_SEARCH, RECOMMENDED_WEB_SEARCH) and not errors:
|
||||
if user_input.get(CONF_WEB_SEARCH, DEFAULT[CONF_WEB_SEARCH]) and not errors:
|
||||
if user_input.get(
|
||||
CONF_WEB_SEARCH_USER_LOCATION, RECOMMENDED_WEB_SEARCH_USER_LOCATION
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DEFAULT[CONF_WEB_SEARCH_USER_LOCATION],
|
||||
):
|
||||
user_input.update(await self._get_location_data())
|
||||
|
||||
@@ -456,7 +453,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
}
|
||||
)
|
||||
response = await client.messages.create(
|
||||
model=RECOMMENDED_CHAT_MODEL,
|
||||
model=cast(str, DEFAULT[CONF_CHAT_MODEL]),
|
||||
messages=[
|
||||
{
|
||||
"role": "user",
|
||||
@@ -471,7 +468,7 @@ class ConversationSubentryFlowHandler(ConfigSubentryFlow):
|
||||
"content": "{", # hints the model to skip any preamble
|
||||
},
|
||||
],
|
||||
max_tokens=RECOMMENDED_MAX_TOKENS,
|
||||
max_tokens=cast(int, DEFAULT[CONF_MAX_TOKENS]),
|
||||
)
|
||||
_LOGGER.debug("Model response: %s", response.content)
|
||||
location_data = location_schema(
|
||||
|
||||
@@ -11,25 +11,29 @@ DEFAULT_AI_TASK_NAME = "Claude AI Task"
|
||||
CONF_RECOMMENDED = "recommended"
|
||||
CONF_PROMPT = "prompt"
|
||||
CONF_CHAT_MODEL = "chat_model"
|
||||
RECOMMENDED_CHAT_MODEL = "claude-3-5-haiku-latest"
|
||||
CONF_MAX_TOKENS = "max_tokens"
|
||||
RECOMMENDED_MAX_TOKENS = 3000
|
||||
CONF_TEMPERATURE = "temperature"
|
||||
RECOMMENDED_TEMPERATURE = 1.0
|
||||
CONF_THINKING_BUDGET = "thinking_budget"
|
||||
RECOMMENDED_THINKING_BUDGET = 0
|
||||
MIN_THINKING_BUDGET = 1024
|
||||
CONF_WEB_SEARCH = "web_search"
|
||||
RECOMMENDED_WEB_SEARCH = False
|
||||
CONF_WEB_SEARCH_USER_LOCATION = "user_location"
|
||||
RECOMMENDED_WEB_SEARCH_USER_LOCATION = False
|
||||
CONF_WEB_SEARCH_MAX_USES = "web_search_max_uses"
|
||||
RECOMMENDED_WEB_SEARCH_MAX_USES = 5
|
||||
CONF_WEB_SEARCH_CITY = "city"
|
||||
CONF_WEB_SEARCH_REGION = "region"
|
||||
CONF_WEB_SEARCH_COUNTRY = "country"
|
||||
CONF_WEB_SEARCH_TIMEZONE = "timezone"
|
||||
|
||||
DEFAULT = {
|
||||
CONF_CHAT_MODEL: "claude-3-5-haiku-latest",
|
||||
CONF_MAX_TOKENS: 3000,
|
||||
CONF_TEMPERATURE: 1.0,
|
||||
CONF_THINKING_BUDGET: 0,
|
||||
CONF_WEB_SEARCH: False,
|
||||
CONF_WEB_SEARCH_USER_LOCATION: False,
|
||||
CONF_WEB_SEARCH_MAX_USES: 5,
|
||||
}
|
||||
|
||||
MIN_THINKING_BUDGET = 1024
|
||||
|
||||
NON_THINKING_MODELS = [
|
||||
"claude-3-5", # Both sonnet and haiku
|
||||
"claude-3-opus",
|
||||
|
||||
@@ -84,14 +84,11 @@ from .const import (
|
||||
CONF_WEB_SEARCH_REGION,
|
||||
CONF_WEB_SEARCH_TIMEZONE,
|
||||
CONF_WEB_SEARCH_USER_LOCATION,
|
||||
DEFAULT,
|
||||
DOMAIN,
|
||||
LOGGER,
|
||||
MIN_THINKING_BUDGET,
|
||||
NON_THINKING_MODELS,
|
||||
RECOMMENDED_CHAT_MODEL,
|
||||
RECOMMENDED_MAX_TOKENS,
|
||||
RECOMMENDED_TEMPERATURE,
|
||||
RECOMMENDED_THINKING_BUDGET,
|
||||
)
|
||||
|
||||
# Max number of back and forth with the LLM to generate a response
|
||||
@@ -604,17 +601,19 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
raise TypeError("First message must be a system message")
|
||||
messages = _convert_content(chat_log.content[1:])
|
||||
|
||||
model = options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
|
||||
model = options.get(CONF_CHAT_MODEL, DEFAULT[CONF_CHAT_MODEL])
|
||||
|
||||
model_args = MessageCreateParamsStreaming(
|
||||
model=model,
|
||||
messages=messages,
|
||||
max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS),
|
||||
max_tokens=options.get(CONF_MAX_TOKENS, DEFAULT[CONF_MAX_TOKENS]),
|
||||
system=system.content,
|
||||
stream=True,
|
||||
)
|
||||
|
||||
thinking_budget = options.get(CONF_THINKING_BUDGET, RECOMMENDED_THINKING_BUDGET)
|
||||
thinking_budget = options.get(
|
||||
CONF_THINKING_BUDGET, DEFAULT[CONF_THINKING_BUDGET]
|
||||
)
|
||||
if (
|
||||
not model.startswith(tuple(NON_THINKING_MODELS))
|
||||
and thinking_budget >= MIN_THINKING_BUDGET
|
||||
@@ -625,7 +624,7 @@ class AnthropicBaseLLMEntity(Entity):
|
||||
else:
|
||||
model_args["thinking"] = ThinkingConfigDisabledParam(type="disabled")
|
||||
model_args["temperature"] = options.get(
|
||||
CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE
|
||||
CONF_TEMPERATURE, DEFAULT[CONF_TEMPERATURE]
|
||||
)
|
||||
|
||||
tools: list[ToolUnionParam] = []
|
||||
|
||||
@@ -14,5 +14,19 @@
|
||||
"start_conversation": {
|
||||
"service": "mdi:forum"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"idle": {
|
||||
"trigger": "mdi:chat-sleep"
|
||||
},
|
||||
"listening": {
|
||||
"trigger": "mdi:chat-question"
|
||||
},
|
||||
"processing": {
|
||||
"trigger": "mdi:chat-processing"
|
||||
},
|
||||
"responding": {
|
||||
"trigger": "mdi:chat-alert"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description": "The behavior of the targeted Assist satellites to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"entity_component": {
|
||||
"_": {
|
||||
"name": "Assist satellite",
|
||||
@@ -16,6 +20,13 @@
|
||||
"id": "Answer ID",
|
||||
"sentences": "Sentences"
|
||||
}
|
||||
},
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
@@ -98,5 +109,51 @@
|
||||
"name": "Start conversation"
|
||||
}
|
||||
},
|
||||
"title": "Assist satellite"
|
||||
"title": "Assist satellite",
|
||||
"triggers": {
|
||||
"idle": {
|
||||
"description": "Triggers when an Assist satellite becomes idle.",
|
||||
"description_configured": "[%key:component::assist_satellite::triggers::idle::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an Assist satellite becomes idle"
|
||||
},
|
||||
"listening": {
|
||||
"description": "Triggers when an Assist satellite starts listening.",
|
||||
"description_configured": "[%key:component::assist_satellite::triggers::listening::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an Assist satellite starts listening"
|
||||
},
|
||||
"processing": {
|
||||
"description": "Triggers when an Assist satellite is processing.",
|
||||
"description_configured": "[%key:component::assist_satellite::triggers::processing::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an Assist satellite is processing"
|
||||
},
|
||||
"responding": {
|
||||
"description": "Triggers when an Assist satellite is responding.",
|
||||
"description_configured": "[%key:component::assist_satellite::triggers::responding::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::assist_satellite::common::trigger_behavior_description%]",
|
||||
"name": "[%key:component::assist_satellite::common::trigger_behavior_name%]"
|
||||
}
|
||||
},
|
||||
"name": "When an Assist satellite is responding"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
homeassistant/components/assist_satellite/trigger.py (new file, 19 lines)

@@ -0,0 +1,19 @@
"""Provides triggers for assist satellites."""

from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger

from .const import DOMAIN
from .entity import AssistSatelliteState

TRIGGERS: dict[str, type[Trigger]] = {
"idle": make_entity_state_trigger(DOMAIN, AssistSatelliteState.IDLE),
"listening": make_entity_state_trigger(DOMAIN, AssistSatelliteState.LISTENING),
"processing": make_entity_state_trigger(DOMAIN, AssistSatelliteState.PROCESSING),
"responding": make_entity_state_trigger(DOMAIN, AssistSatelliteState.RESPONDING),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for assist satellites."""
return TRIGGERS
homeassistant/components/assist_satellite/triggers.yaml (new file, 20 lines)

@@ -0,0 +1,20 @@
.trigger_common: &trigger_common
target:
entity:
domain: assist_satellite
fields:
behavior:
required: true
default: any
selector:
select:
options:
- first
- last
- any
translation_key: trigger_behavior

idle: *trigger_common
listening: *trigger_common
processing: *trigger_common
responding: *trigger_common
@@ -29,5 +29,5 @@
"documentation": "https://www.home-assistant.io/integrations/august",
"iot_class": "cloud_push",
"loggers": ["pubnub", "yalexs"],
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.1.2"]
"requirements": ["yalexs==9.2.0", "yalexs-ble==3.2.1"]
}
@@ -6,5 +6,10 @@
"dependencies": ["blueprint", "trace"],
"documentation": "https://www.home-assistant.io/integrations/automation",
"integration_type": "system",
"preview_features": {
"new_triggers_conditions": {
"report_issue_url": "https://github.com/home-assistant/core/issues/new?template=bug_report.yml&integration_link=https://www.home-assistant.io/integrations/automation&integration_name=Automation"
}
},
"quality_scale": "internal"
}
@@ -67,6 +67,14 @@
"title": "[%key:component::automation::common::validation_failed_title%]"
}
},
"preview_features": {
"new_triggers_conditions": {
"description": "Enables new intuitive triggers and conditions that are more user-friendly than technical state-based options.\n\nThese new automation features support targets across your entire home, letting you trigger automations for any entity, device, area, floor, or label (for example, when any light in your living room turned on). Integrations can now also provide their own intuitive triggers and conditions, just like actions.\n\nThis preview also includes a new tree view to help you navigate your home when adding triggers, conditions, and actions.",
"disable_confirmation": "Disabling this preview will cause automations and scripts that use the new intuitive triggers and conditions to fail.\n\nBefore disabling, ensure that your automations or scripts do not rely on this feature.",
"enable_confirmation": "This feature is still in development and may change. These new intuitive triggers and conditions are being refined based on user feedback and are not yet complete.\n\nBy enabling this preview, you'll have early access to these new capabilities, but be aware that they may be modified or updated in future releases.",
"name": "Intuitive triggers and conditions"
}
},
"services": {
"reload": {
"description": "Reloads the automation configuration.",
@@ -1,7 +1,7 @@
{
"domain": "awair",
"name": "Awair",
"codeowners": ["@ahayworth", "@danielsjf"],
"codeowners": ["@ahayworth", "@ricohageman"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/awair",
"iot_class": "local_polling",
homeassistant/components/awair/quality_scale.yaml (new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
rules:
|
||||
# Bronze
|
||||
action-setup:
|
||||
status: exempt
|
||||
comment: No actions defined
|
||||
appropriate-polling:
|
||||
status: done
|
||||
comment: |
|
||||
We fetch both user and devices, could probably slow one down
|
||||
brands: done
|
||||
common-modules: done
|
||||
config-flow:
|
||||
status: todo
|
||||
comment: |
|
||||
data_description fields are missing
|
||||
Should not abort in cloud step when anything else than invalid access token
|
||||
Find out why access token is optional
|
||||
Discovered devices step is redundant
|
||||
config-flow-test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
Move happy flow to the top and merge with `test_show_form`
|
||||
Reuse `result`
|
||||
Cloud tests should initialize with data directly
|
||||
Tests should finish in CREATE_ENTRY
|
||||
dependency-transparency:
|
||||
status: todo
|
||||
comment: |
|
||||
Dependency is not built in the CI
|
||||
docs-actions: todo
|
||||
docs-high-level-description: done
|
||||
docs-installation-instructions: done
|
||||
docs-removal-instructions: done
|
||||
entity-event-setup:
|
||||
status: exempt
|
||||
comment: No explicit event subscription
|
||||
entity-unique-id: done
|
||||
has-entity-name: done
|
||||
runtime-data: done
|
||||
test-before-configure: done
|
||||
test-before-setup: done
|
||||
unique-config-entry: done
|
||||
|
||||
# Silver
|
||||
action-exceptions:
|
||||
status: exempt
|
||||
comment: No actions defined
|
||||
config-entry-unloading: done
|
||||
docs-configuration-parameters: todo
|
||||
docs-installation-parameters: todo
|
||||
entity-unavailable: todo
|
||||
integration-owner: done
|
||||
log-when-unavailable: done
|
||||
parallel-updates: todo
|
||||
reauthentication-flow: done
|
||||
test-coverage:
|
||||
status: todo
|
||||
comment: |
|
||||
Patch objects where we use them
|
||||
Use test helpers to load JSON
|
||||
typo `no_devicess_fixture`
|
||||
Make common config entries for cloud and local
|
||||
Test setup of the integration
|
||||
|
||||
# Gold
|
||||
devices:
|
||||
status: done
|
||||
comment: |
|
||||
Can move to shorthand attribute
|
||||
Can remove typecast
|
||||
diagnostics: todo
|
||||
discovery: done
|
||||
discovery-update-info: done
|
||||
docs-data-update: done
|
||||
docs-examples: todo
|
||||
docs-known-limitations: todo
|
||||
docs-supported-devices: todo
|
||||
docs-supported-functions: todo
|
||||
docs-troubleshooting: todo
|
||||
docs-use-cases: todo
|
||||
dynamic-devices: todo
|
||||
entity-category: todo
|
||||
entity-device-class:
|
||||
status: done
|
||||
comment: |
|
||||
Can remove rounding
|
||||
entity-disabled-by-default: done
|
||||
entity-translations: done
|
||||
exception-translations: todo
|
||||
icon-translations: done
|
||||
reconfiguration-flow: todo
|
||||
repair-issues: todo
|
||||
stale-devices: todo
|
||||
|
||||
# Platinum
|
||||
async-dependency: todo
|
||||
inject-websession: done
|
||||
strict-typing: todo
|
||||
@@ -21,10 +21,10 @@ from .const import (
|
||||
ATTR_ITEM_NUMBER,
|
||||
ATTR_SERIAL_NUMBER,
|
||||
ATTR_TYPE_NUMBER,
|
||||
COMPATIBLE_MODELS,
|
||||
CONF_SERIAL_NUMBER,
|
||||
DEFAULT_MODEL,
|
||||
DOMAIN,
|
||||
SELECTABLE_MODELS,
|
||||
)
|
||||
from .util import get_serial_number_from_jid
|
||||
|
||||
@@ -70,7 +70,7 @@ class BangOlufsenConfigFlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
{
|
||||
vol.Required(CONF_HOST): str,
|
||||
vol.Required(CONF_MODEL, default=DEFAULT_MODEL): SelectSelector(
|
||||
SelectSelectorConfig(options=COMPATIBLE_MODELS)
|
||||
SelectSelectorConfig(options=SELECTABLE_MODELS)
|
||||
),
|
||||
}
|
||||
)
|
||||
|
||||
@@ -62,6 +62,7 @@ class BangOlufsenMediaType(StrEnum):
class BangOlufsenModel(StrEnum):
"""Enum for compatible model names."""

# Mozart devices
BEOCONNECT_CORE = "Beoconnect Core"
BEOLAB_8 = "BeoLab 8"
BEOLAB_28 = "BeoLab 28"
@@ -71,7 +72,26 @@ class BangOlufsenModel(StrEnum):
BEOSOUND_BALANCE = "Beosound Balance"
BEOSOUND_EMERGE = "Beosound Emerge"
BEOSOUND_LEVEL = "Beosound Level"
BEOSOUND_PREMIERE = "Beosound Premiere"
BEOSOUND_THEATRE = "Beosound Theatre"
# Remote devices
BEOREMOTE_ONE = "Beoremote One"


# Physical "buttons" on devices
class BangOlufsenButtons(StrEnum):
"""Enum for device buttons."""

BLUETOOTH = "Bluetooth"
MICROPHONE = "Microphone"
NEXT = "Next"
PLAY_PAUSE = "PlayPause"
PRESET_1 = "Preset1"
PRESET_2 = "Preset2"
PRESET_3 = "Preset3"
PRESET_4 = "Preset4"
PREVIOUS = "Previous"
VOLUME = "Volume"


# Dispatcher events
@@ -79,6 +99,7 @@ class WebsocketNotification(StrEnum):
"""Enum for WebSocket notification types."""

ACTIVE_LISTENING_MODE = "active_listening_mode"
BEO_REMOTE_BUTTON = "beo_remote_button"
BUTTON = "button"
PLAYBACK_ERROR = "playback_error"
PLAYBACK_METADATA = "playback_metadata"
@@ -96,6 +117,7 @@ class WebsocketNotification(StrEnum):
BEOLINK_AVAILABLE_LISTENERS = "beolinkAvailableListeners"
CONFIGURATION = "configuration"
NOTIFICATION = "notification"
REMOTE_CONTROL_DEVICES = "remoteControlDevices"
REMOTE_MENU_CHANGED = "remoteMenuChanged"

ALL = "all"
@@ -111,7 +133,11 @@ CONF_SERIAL_NUMBER: Final = "serial_number"
CONF_BEOLINK_JID: Final = "jid"

# Models to choose from in manual configuration.
COMPATIBLE_MODELS: list[str] = [x.value for x in BangOlufsenModel]
SELECTABLE_MODELS: list[str] = [
model.value for model in BangOlufsenModel if model != BangOlufsenModel.BEOREMOTE_ONE
]

MANUFACTURER: Final[str] = "Bang & Olufsen"

# Attribute names for zeroconf discovery.
ATTR_TYPE_NUMBER: Final[str] = "tn"
@@ -204,29 +230,16 @@ FALLBACK_SOURCES: Final[SourceArray] = SourceArray(
),
]
)
# Map for storing compatibility of devices.

MODEL_SUPPORT_DEVICE_BUTTONS: Final[str] = "device_buttons"

MODEL_SUPPORT_MAP = {
MODEL_SUPPORT_DEVICE_BUTTONS: (
BangOlufsenModel.BEOLAB_8,
BangOlufsenModel.BEOLAB_28,
BangOlufsenModel.BEOSOUND_2,
BangOlufsenModel.BEOSOUND_A5,
BangOlufsenModel.BEOSOUND_A9,
BangOlufsenModel.BEOSOUND_BALANCE,
BangOlufsenModel.BEOSOUND_EMERGE,
BangOlufsenModel.BEOSOUND_LEVEL,
BangOlufsenModel.BEOSOUND_THEATRE,
)
}

# Device events
BANG_OLUFSEN_WEBSOCKET_EVENT: Final[str] = f"{DOMAIN}_websocket_event"

# Dict used to translate native Bang & Olufsen event names to string.json compatible ones
EVENT_TRANSLATION_MAP: dict[str, str] = {
# Beoremote One
"KeyPress": "key_press",
"KeyRelease": "key_release",
# Physical "buttons"
"shortPress (Release)": "short_press_release",
"longPress (Timeout)": "long_press_timeout",
"longPress (Release)": "long_press_release",
@@ -236,18 +249,7 @@ EVENT_TRANSLATION_MAP: dict[str, str] = {

CONNECTION_STATUS: Final[str] = "CONNECTION_STATUS"

DEVICE_BUTTONS: Final[list[str]] = [
"Bluetooth",
"Microphone",
"Next",
"PlayPause",
"Preset1",
"Preset2",
"Preset3",
"Preset4",
"Previous",
"Volume",
]
DEVICE_BUTTONS: Final[list[str]] = [x.value for x in BangOlufsenButtons]


DEVICE_BUTTON_EVENTS: Final[list[str]] = [
@@ -258,6 +260,70 @@ DEVICE_BUTTON_EVENTS: Final[list[str]] = [
"very_long_press_release",
]

BEO_REMOTE_SUBMENU_CONTROL: Final[str] = "Control"
BEO_REMOTE_SUBMENU_LIGHT: Final[str] = "Light"

# Common for both submenus
BEO_REMOTE_KEYS: Final[tuple[str, ...]] = (
"Blue",
"Digit0",
"Digit1",
"Digit2",
"Digit3",
"Digit4",
"Digit5",
"Digit6",
"Digit7",
"Digit8",
"Digit9",
"Down",
"Green",
"Left",
"Play",
"Red",
"Rewind",
"Right",
"Select",
"Stop",
"Up",
"Wind",
"Yellow",
"Func1",
"Func2",
"Func3",
"Func4",
"Func5",
"Func6",
"Func7",
"Func8",
"Func9",
"Func10",
"Func11",
"Func12",
"Func13",
"Func14",
"Func15",
"Func16",
"Func17",
)

# "keys" that are unique to the Control submenu
BEO_REMOTE_CONTROL_KEYS: Final[tuple[str, ...]] = (
"Func18",
"Func19",
"Func20",
"Func21",
"Func22",
"Func23",
"Func24",
"Func25",
"Func26",
"Func27",
)

BEO_REMOTE_KEY_EVENTS: Final[list[str]] = ["key_press", "key_release"]


# Beolink Converter NL/ML sources need to be transformed to upper case
BEOLINK_JOIN_SOURCES_TO_UPPER = (
"aux_a",
@@ -6,11 +6,13 @@ from typing import TYPE_CHECKING, Any

from homeassistant.components.event import DOMAIN as EVENT_DOMAIN
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.const import CONF_MODEL
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from . import BangOlufsenConfigEntry
from .const import DEVICE_BUTTONS, DOMAIN
from .const import DOMAIN
from .util import get_device_buttons


async def async_get_config_entry_diagnostics(
@@ -40,7 +42,7 @@ async def async_get_config_entry_diagnostics(
data["media_player"] = state_dict

# Add button Event entity states (if enabled)
for device_button in DEVICE_BUTTONS:
for device_button in get_device_buttons(config_entry.data[CONF_MODEL]):
if entity_id := entity_registry.async_get_entity_id(
EVENT_DOMAIN, DOMAIN, f"{config_entry.unique_id}_{device_button}"
):
@@ -2,22 +2,34 @@

from __future__ import annotations

from typing import TYPE_CHECKING

from mozart_api.models import PairedRemote

from homeassistant.components.event import EventDeviceClass, EventEntity
from homeassistant.const import CONF_MODEL
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import BangOlufsenConfigEntry
from .const import (
BEO_REMOTE_CONTROL_KEYS,
BEO_REMOTE_KEY_EVENTS,
BEO_REMOTE_KEYS,
BEO_REMOTE_SUBMENU_CONTROL,
BEO_REMOTE_SUBMENU_LIGHT,
CONNECTION_STATUS,
DEVICE_BUTTON_EVENTS,
DEVICE_BUTTONS,
MODEL_SUPPORT_DEVICE_BUTTONS,
MODEL_SUPPORT_MAP,
DOMAIN,
MANUFACTURER,
BangOlufsenModel,
WebsocketNotification,
)
from .entity import BangOlufsenEntity
from .util import get_device_buttons, get_remotes

PARALLEL_UPDATES = 0

@@ -27,25 +39,87 @@ async def async_setup_entry(
config_entry: BangOlufsenConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Sensor entities from config entry."""
"""Set up Event entities from config entry."""
entities: list[BangOlufsenEvent] = []

if config_entry.data[CONF_MODEL] in MODEL_SUPPORT_MAP[MODEL_SUPPORT_DEVICE_BUTTONS]:
async_add_entities(
BangOlufsenButtonEvent(config_entry, button_type)
for button_type in DEVICE_BUTTONS
async_add_entities(
BangOlufsenButtonEvent(config_entry, button_type)
for button_type in get_device_buttons(config_entry.data[CONF_MODEL])
)

# Check for connected Beoremote One
remotes = await get_remotes(config_entry.runtime_data.client)

for remote in remotes:
# Add Light keys
entities.extend(
[
BangOlufsenRemoteKeyEvent(
config_entry,
remote,
f"{BEO_REMOTE_SUBMENU_LIGHT}/{key_type}",
)
for key_type in BEO_REMOTE_KEYS
]
)

# Add Control keys
entities.extend(
[
BangOlufsenRemoteKeyEvent(
config_entry,
remote,
f"{BEO_REMOTE_SUBMENU_CONTROL}/{key_type}",
)
for key_type in (*BEO_REMOTE_KEYS, *BEO_REMOTE_CONTROL_KEYS)
]
)

class BangOlufsenButtonEvent(BangOlufsenEntity, EventEntity):
"""Event class for Button events."""
# If the remote is no longer available, then delete the device.
# The remote may appear as being available to the device after it has been unpaired on the remote
# As it has to be removed from the device on the app.

device_registry = dr.async_get(hass)
devices = device_registry.devices.get_devices_for_config_entry_id(
config_entry.entry_id
)
for device in devices:
if (
device.model == BangOlufsenModel.BEOREMOTE_ONE
and device.serial_number not in {remote.serial_number for remote in remotes}
):
device_registry.async_update_device(
device.id, remove_config_entry_id=config_entry.entry_id
)

async_add_entities(new_entities=entities)


class BangOlufsenEvent(BangOlufsenEntity, EventEntity):
"""Base Event class."""

_attr_device_class = EventDeviceClass.BUTTON
_attr_entity_registry_enabled_default = False

def __init__(self, config_entry: BangOlufsenConfigEntry) -> None:
"""Initialize Event."""
super().__init__(config_entry, config_entry.runtime_data.client)

@callback
def _async_handle_event(self, event: str) -> None:
"""Handle event."""
self._trigger_event(event)
self.async_write_ha_state()


class BangOlufsenButtonEvent(BangOlufsenEvent):
"""Event class for Button events."""

_attr_event_types = DEVICE_BUTTON_EVENTS

def __init__(self, config_entry: BangOlufsenConfigEntry, button_type: str) -> None:
"""Initialize Button."""
super().__init__(config_entry, config_entry.runtime_data.client)
super().__init__(config_entry)

self._attr_unique_id = f"{self._unique_id}_{button_type}"

@@ -59,20 +133,65 @@ class BangOlufsenButtonEvent(BangOlufsenEntity, EventEntity):
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{self._unique_id}_{CONNECTION_STATUS}",
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
self._async_update_connection_state,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BUTTON}_{self._button_type}",
self._async_handle_event,
)
)

@callback
def _async_handle_event(self, event: str) -> None:
"""Handle event."""
self._trigger_event(event)
self.async_write_ha_state()

class BangOlufsenRemoteKeyEvent(BangOlufsenEvent):
"""Event class for Beoremote One key events."""

_attr_event_types = BEO_REMOTE_KEY_EVENTS

def __init__(
self,
config_entry: BangOlufsenConfigEntry,
remote: PairedRemote,
key_type: str,
) -> None:
"""Initialize Beoremote One key."""
super().__init__(config_entry)

if TYPE_CHECKING:
assert remote.serial_number

self._attr_unique_id = f"{remote.serial_number}_{self._unique_id}_{key_type}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{remote.serial_number}_{self._unique_id}")},
name=f"{BangOlufsenModel.BEOREMOTE_ONE}-{remote.serial_number}-{self._unique_id}",
model=BangOlufsenModel.BEOREMOTE_ONE,
serial_number=remote.serial_number,
sw_version=remote.app_version,
manufacturer=MANUFACTURER,
via_device=(DOMAIN, self._unique_id),
)

# Make the native key name Home Assistant compatible
self._attr_translation_key = key_type.lower().replace("/", "_")

self._key_type = key_type

async def async_added_to_hass(self) -> None:
"""Listen to WebSocket Beoremote One key events."""
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
self._async_update_connection_state,
)
)
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEO_REMOTE_BUTTON}_{self._key_type}",
self._async_handle_event,
)
)
@@ -1,4 +1,278 @@
|
||||
{
|
||||
"entity": {
|
||||
"event": {
|
||||
"control_blue": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit0": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_digit9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_down": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func10": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func11": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func12": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func13": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func14": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func15": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func16": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func17": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func18": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func19": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func20": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func21": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func22": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func23": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func24": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func25": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func26": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func27": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_func9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_green": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_left": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_play": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_red": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_rewind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_right": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_select": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_stop": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_up": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_wind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"control_yellow": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_blue": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit0": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_digit9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_down": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func1": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func10": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func11": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func12": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func13": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func14": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func15": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func16": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func17": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func2": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func3": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func4": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func5": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func6": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func7": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func8": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_func9": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_green": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_left": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_play": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_red": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_rewind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_right": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_select": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_stop": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_up": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_wind": {
|
||||
"default": "mdi:remote"
|
||||
},
|
||||
"light_yellow": {
|
||||
"default": "mdi:remote"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"beolink_allstandby": { "service": "mdi:close-circle-multiple-outline" },
|
||||
"beolink_expand": { "service": "mdi:location-enter" },
|
||||
|
||||
@@ -80,6 +80,7 @@ from .const import (
CONNECTION_STATUS,
DOMAIN,
FALLBACK_SOURCES,
MANUFACTURER,
VALID_MEDIA_TYPES,
BangOlufsenMediaType,
BangOlufsenSource,
@@ -201,7 +202,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
self._attr_device_info = DeviceInfo(
configuration_url=f"http://{self._host}/#/",
identifiers={(DOMAIN, self._unique_id)},
manufacturer="Bang & Olufsen",
manufacturer=MANUFACTURER,
model=self._model,
serial_number=self._unique_id,
)
@@ -249,7 +250,7 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity):
self.async_on_remove(
async_dispatcher_connect(
self.hass,
f"{self._unique_id}_{signal}",
f"{DOMAIN}_{self._unique_id}_{signal}",
signal_handler,
)
)
File diff suppressed because it is too large
@@ -2,11 +2,16 @@

from __future__ import annotations

from typing import cast

from mozart_api.models import PairedRemote
from mozart_api.mozart_client import MozartClient

from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceEntry

from .const import DOMAIN
from .const import DEVICE_BUTTONS, DOMAIN, BangOlufsenButtons, BangOlufsenModel


def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry:
@@ -21,3 +26,30 @@ def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry:
def get_serial_number_from_jid(jid: str) -> str:
"""Get serial number from Beolink JID."""
return jid.split(".")[2].split("@")[0]


async def get_remotes(client: MozartClient) -> list[PairedRemote]:
"""Get paired remotes."""

bluetooth_remote_list = await client.get_bluetooth_remotes()

return [
remote
for remote in cast(list[PairedRemote], bluetooth_remote_list.items)
if remote.serial_number is not None
]


def get_device_buttons(model: BangOlufsenModel) -> list[str]:
"""Get supported buttons for a given model."""
buttons = DEVICE_BUTTONS.copy()

# Beosound Premiere does not have a bluetooth button
if model == BangOlufsenModel.BEOSOUND_PREMIERE:
buttons.remove(BangOlufsenButtons.BLUETOOTH)

# Beoconnect Core does not have any buttons
elif model == BangOlufsenModel.BEOCONNECT_CORE:
buttons = []

return buttons
@@ -6,6 +6,7 @@ import logging
from typing import TYPE_CHECKING

from mozart_api.models import (
BeoRemoteButton,
ButtonEvent,
ListeningModeProps,
PlaybackContentMetadata,
@@ -28,11 +29,13 @@ from homeassistant.util.enum import try_parse_enum
from .const import (
BANG_OLUFSEN_WEBSOCKET_EVENT,
CONNECTION_STATUS,
DOMAIN,
EVENT_TRANSLATION_MAP,
BangOlufsenModel,
WebsocketNotification,
)
from .entity import BangOlufsenBase
from .util import get_device
from .util import get_device, get_remotes

_LOGGER = logging.getLogger(__name__)

@@ -57,6 +60,9 @@ class BangOlufsenWebsocket(BangOlufsenBase):
self._client.get_active_listening_mode_notifications(
self.on_active_listening_mode
)
self._client.get_beo_remote_button_notifications(
self.on_beo_remote_button_notification
)
self._client.get_button_notifications(self.on_button_notification)

self._client.get_playback_error_notifications(
@@ -87,7 +93,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Update all entities of the connection status."""
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{CONNECTION_STATUS}",
f"{DOMAIN}_{self._unique_id}_{CONNECTION_STATUS}",
self._client.websocket_connected,
)

@@ -105,10 +111,22 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send active_listening_mode dispatch."""
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.ACTIVE_LISTENING_MODE}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.ACTIVE_LISTENING_MODE}",
notification,
)

def on_beo_remote_button_notification(self, notification: BeoRemoteButton) -> None:
"""Send beo_remote_button dispatch."""
if TYPE_CHECKING:
assert notification.type

# Send to event entity
async_dispatcher_send(
self.hass,
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEO_REMOTE_BUTTON}_{notification.key}",
EVENT_TRANSLATION_MAP[notification.type],
)

def on_button_notification(self, notification: ButtonEvent) -> None:
"""Send button dispatch."""
# State is expected to always be available.
@@ -118,11 +136,11 @@ class BangOlufsenWebsocket(BangOlufsenBase):
# Send to event entity
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BUTTON}_{notification.button}",
EVENT_TRANSLATION_MAP[notification.state],
)

def on_notification_notification(
async def on_notification_notification(
self, notification: WebsocketNotificationTag
) -> None:
"""Send notification dispatch."""
@@ -136,24 +154,51 @@ class BangOlufsenWebsocket(BangOlufsenBase):
):
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.BEOLINK}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.BEOLINK}",
)
elif notification_type is WebsocketNotification.CONFIGURATION:
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.CONFIGURATION}",
)
elif notification_type is WebsocketNotification.REMOTE_MENU_CHANGED:
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}",
)

# This notification is triggered by a remote pairing, unpairing and connecting to a device
# So the current remote devices have to be compared to available remotes to determine action
elif notification_type is WebsocketNotification.REMOTE_CONTROL_DEVICES:
device_registry = dr.async_get(self.hass)
# Get remote devices connected to the device from Home Assistant
device_serial_numbers = [
device.serial_number
for device in device_registry.devices.get_devices_for_config_entry_id(
self.entry.entry_id
)
if device.serial_number is not None
and device.model == BangOlufsenModel.BEOREMOTE_ONE
]
# Get paired remotes from device
remote_serial_numbers = [
remote.serial_number
for remote in await get_remotes(self._client)
if remote.serial_number is not None
]
# Check if number of remote devices correspond to number of paired remotes
if len(remote_serial_numbers) != len(device_serial_numbers):
_LOGGER.info(
"A Beoremote One has been paired or unpaired to %s. Reloading config entry to add device and entities",
self.entry.title,
)
self.hass.config_entries.async_schedule_reload(self.entry.entry_id)

def on_playback_error_notification(self, notification: PlaybackError) -> None:
"""Send playback_error dispatch."""
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_ERROR}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_ERROR}",
notification,
)

@@ -163,7 +208,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send playback_metadata dispatch."""
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_METADATA}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_METADATA}",
notification,
)

@@ -171,7 +216,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send playback_progress dispatch."""
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_PROGRESS}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_PROGRESS}",
notification,
)

@@ -179,7 +224,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send playback_state dispatch."""
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_STATE}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_STATE}",
notification,
)

@@ -187,7 +232,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send playback_source dispatch."""
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.PLAYBACK_SOURCE}",
notification,
)

@@ -195,7 +240,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send source_change dispatch."""
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.SOURCE_CHANGE}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.SOURCE_CHANGE}",
notification,
)

@@ -203,7 +248,7 @@ class BangOlufsenWebsocket(BangOlufsenBase):
"""Send volume dispatch."""
async_dispatcher_send(
self.hass,
f"{self._unique_id}_{WebsocketNotification.VOLUME}",
f"{DOMAIN}_{self._unique_id}_{WebsocketNotification.VOLUME}",
notification,
)
@@ -20,7 +20,7 @@
"bluetooth-adapters==2.1.0",
"bluetooth-auto-recovery==1.5.3",
"bluetooth-data-tools==1.28.4",
"dbus-fast==3.0.0",
"dbus-fast==3.1.2",
"habluetooth==5.7.0"
]
}
@@ -1 +0,0 @@
"""The bluetooth_tracker component."""
@@ -1,10 +0,0 @@
"""Constants for the Bluetooth Tracker component."""

from typing import Final

DOMAIN: Final = "bluetooth_tracker"
SERVICE_UPDATE: Final = "update"

BT_PREFIX: Final = "BT_"
CONF_REQUEST_RSSI: Final = "request_rssi"
DEFAULT_DEVICE_ID: Final = -1
@@ -1,213 +0,0 @@
"""Tracking for bluetooth devices."""

from __future__ import annotations

import asyncio
from datetime import datetime, timedelta
import logging
from typing import Final

import bluetooth
from bt_proximity import BluetoothRSSI
import voluptuous as vol

from homeassistant.components.device_tracker import (
CONF_SCAN_INTERVAL,
CONF_TRACK_NEW,
DEFAULT_TRACK_NEW,
PLATFORM_SCHEMA as DEVICE_TRACKER_PLATFORM_SCHEMA,
SCAN_INTERVAL,
SourceType,
)
from homeassistant.components.device_tracker.legacy import (
YAML_DEVICES,
AsyncSeeCallback,
Device,
async_load_config,
)
from homeassistant.const import CONF_DEVICE_ID
from homeassistant.core import HomeAssistant, ServiceCall
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import (
BT_PREFIX,
CONF_REQUEST_RSSI,
DEFAULT_DEVICE_ID,
DOMAIN,
SERVICE_UPDATE,
)

_LOGGER: Final = logging.getLogger(__name__)

PLATFORM_SCHEMA: Final = DEVICE_TRACKER_PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_TRACK_NEW): cv.boolean,
vol.Optional(CONF_REQUEST_RSSI): cv.boolean,
vol.Optional(CONF_DEVICE_ID, default=DEFAULT_DEVICE_ID): vol.All(
vol.Coerce(int), vol.Range(min=-1)
),
}
)


def is_bluetooth_device(device: Device) -> bool:
"""Check whether a device is a bluetooth device by its mac."""
return device.mac is not None and device.mac[:3].upper() == BT_PREFIX


def discover_devices(device_id: int) -> list[tuple[str, str]]:
"""Discover Bluetooth devices."""
try:
result = bluetooth.discover_devices(
duration=8,
lookup_names=True,
flush_cache=True,
lookup_class=False,
device_id=device_id,
)
except OSError as ex:
# OSError is generally thrown if a bluetooth device isn't found
_LOGGER.error("Couldn't discover bluetooth devices: %s", ex)
return []
_LOGGER.debug("Bluetooth devices discovered = %d", len(result))
return result # type: ignore[no-any-return]


async def see_device(
hass: HomeAssistant,
async_see: AsyncSeeCallback,
mac: str,
device_name: str,
rssi: tuple[int] | None = None,
) -> None:
"""Mark a device as seen."""
attributes = {}
if rssi is not None:
attributes["rssi"] = rssi

await async_see(
mac=f"{BT_PREFIX}{mac}",
host_name=device_name,
attributes=attributes,
source_type=SourceType.BLUETOOTH,
)


async def get_tracking_devices(hass: HomeAssistant) -> tuple[set[str], set[str]]:
"""Load all known devices.

We just need the devices so set consider_home and home range to 0
"""
yaml_path: str = hass.config.path(YAML_DEVICES)

devices = await async_load_config(yaml_path, hass, timedelta(0))
bluetooth_devices = [device for device in devices if is_bluetooth_device(device)]

devices_to_track: set[str] = {
device.mac[3:]
for device in bluetooth_devices
if device.track and device.mac is not None
}
devices_to_not_track: set[str] = {
device.mac[3:]
for device in bluetooth_devices
if not device.track and device.mac is not None
}

return devices_to_track, devices_to_not_track


def lookup_name(mac: str) -> str | None:
"""Lookup a Bluetooth device name."""
_LOGGER.debug("Scanning %s", mac)
return bluetooth.lookup_name(mac, timeout=5) # type: ignore[no-any-return]


async def async_setup_scanner(
hass: HomeAssistant,
config: ConfigType,
async_see: AsyncSeeCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> bool:
"""Set up the Bluetooth Scanner."""
device_id: int = config[CONF_DEVICE_ID]
interval: timedelta = config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL)
request_rssi: bool = config.get(CONF_REQUEST_RSSI, False)
update_bluetooth_lock = asyncio.Lock()

# If track new devices is true discover new devices on startup.
track_new: bool = config.get(CONF_TRACK_NEW, DEFAULT_TRACK_NEW)
_LOGGER.debug("Tracking new devices is set to %s", track_new)

devices_to_track, devices_to_not_track = await get_tracking_devices(hass)

if not devices_to_track and not track_new:
_LOGGER.debug("No Bluetooth devices to track and not tracking new devices")

if request_rssi:
_LOGGER.debug("Detecting RSSI for devices")

async def perform_bluetooth_update() -> None:
"""Discover Bluetooth devices and update status."""
_LOGGER.debug("Performing Bluetooth devices discovery and update")
tasks: list[asyncio.Task[None]] = []

try:
if track_new:
devices = await hass.async_add_executor_job(discover_devices, device_id)
for mac, _device_name in devices:
if mac not in devices_to_track and mac not in devices_to_not_track:
devices_to_track.add(mac)

for mac in devices_to_track:
friendly_name = await hass.async_add_executor_job(lookup_name, mac)
if friendly_name is None:
# Could not lookup device name
continue

rssi = None
if request_rssi:
client = BluetoothRSSI(mac)
rssi = await hass.async_add_executor_job(client.request_rssi)
client.close()

tasks.append(
asyncio.create_task(
see_device(hass, async_see, mac, friendly_name, rssi)
)
)

if tasks:
await asyncio.wait(tasks)

except bluetooth.BluetoothError:
_LOGGER.exception("Error looking up Bluetooth device")

async def update_bluetooth(now: datetime | None = None) -> None:
"""Lookup Bluetooth devices and update status."""
# If an update is in progress, we don't do anything
if update_bluetooth_lock.locked():
_LOGGER.debug(
(
"Previous execution of update_bluetooth is taking longer than the"
" scheduled update of interval %s"
),
interval,
)
return

async with update_bluetooth_lock:
await perform_bluetooth_update()

async def handle_manual_update_bluetooth(call: ServiceCall) -> None:
"""Update bluetooth devices on demand."""
await update_bluetooth()

hass.async_create_task(update_bluetooth())
async_track_time_interval(hass, update_bluetooth, interval)

hass.services.async_register(DOMAIN, SERVICE_UPDATE, handle_manual_update_bluetooth)

return True
@@ -1,7 +0,0 @@
{
"services": {
"update": {
"service": "mdi:update"
}
}
}
@@ -1,10 +0,0 @@
{
"domain": "bluetooth_tracker",
"name": "Bluetooth Tracker",
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/bluetooth_tracker",
"iot_class": "local_polling",
"loggers": ["bluetooth", "bt_proximity"],
"quality_scale": "legacy",
"requirements": ["bt-proximity==0.2.1", "PyBluez==0.22"]
}
@@ -1 +0,0 @@
update:
@@ -1,8 +0,0 @@
{
"services": {
"update": {
"description": "Triggers manual tracker update.",
"name": "Update"
}
}
}
@@ -96,5 +96,16 @@
"turn_on": {
"service": "mdi:power-on"
}
},
"triggers": {
"started_heating": {
"trigger": "mdi:fire"
},
"turned_off": {
"trigger": "mdi:power-off"
},
"turned_on": {
"trigger": "mdi:power-on"
}
}
}
@@ -1,4 +1,8 @@
{
"common": {
"trigger_behavior_description": "The behavior of the targeted climates to trigger on.",
"trigger_behavior_name": "Behavior"
},
"device_automation": {
"action_type": {
"set_hvac_mode": "Change HVAC mode on {entity_name}",
@@ -187,6 +191,13 @@
"heat_cool": "Heat/cool",
"off": "[%key:common::state::off%]"
}
},
"trigger_behavior": {
"options": {
"any": "Any",
"first": "First",
"last": "Last"
}
}
},
"services": {
@@ -285,5 +296,40 @@
"name": "[%key:common::action::turn_on%]"
}
},
"title": "Climate"
"title": "Climate",
"triggers": {
"started_heating": {
"description": "Triggers when a climate starts to heat.",
"description_configured": "[%key:component::climate::triggers::started_heating::description%]",
"fields": {
"behavior": {
"description": "[%key:component::climate::common::trigger_behavior_description%]",
"name": "[%key:component::climate::common::trigger_behavior_name%]"
}
},
"name": "When a climate starts to heat"
},
"turned_off": {
"description": "Triggers when a climate is turned off.",
"description_configured": "[%key:component::climate::triggers::turned_off::description%]",
"fields": {
"behavior": {
"description": "[%key:component::climate::common::trigger_behavior_description%]",
"name": "[%key:component::climate::common::trigger_behavior_name%]"
}
},
"name": "When a climate is turned off"
},
"turned_on": {
"description": "Triggers when a climate is turned on.",
"description_configured": "[%key:component::climate::triggers::turned_on::description%]",
"fields": {
"behavior": {
"description": "[%key:component::climate::common::trigger_behavior_description%]",
"name": "[%key:component::climate::common::trigger_behavior_name%]"
}
},
"name": "When a climate is turned on"
}
}
}
37 homeassistant/components/climate/trigger.py Normal file
@@ -0,0 +1,37 @@
"""Provides triggers for climates."""

from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import (
Trigger,
make_conditional_entity_state_trigger,
make_entity_state_attribute_trigger,
make_entity_state_trigger,
)

from .const import ATTR_HVAC_ACTION, DOMAIN, HVACAction, HVACMode

TRIGGERS: dict[str, type[Trigger]] = {
"turned_off": make_entity_state_trigger(DOMAIN, HVACMode.OFF),
"turned_on": make_conditional_entity_state_trigger(
DOMAIN,
from_states={
HVACMode.OFF,
},
to_states={
HVACMode.AUTO,
HVACMode.COOL,
HVACMode.DRY,
HVACMode.FAN_ONLY,
HVACMode.HEAT,
HVACMode.HEAT_COOL,
},
),
"started_heating": make_entity_state_attribute_trigger(
DOMAIN, ATTR_HVAC_ACTION, HVACAction.HEATING
),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
"""Return the triggers for climates."""
return TRIGGERS
19 homeassistant/components/climate/triggers.yaml Normal file
@@ -0,0 +1,19 @@
.trigger_common: &trigger_common
target:
entity:
domain: climate
fields:
behavior:
required: true
default: any
selector:
select:
translation_key: trigger_behavior
options:
- first
- last
- any

started_heating: *trigger_common
turned_off: *trigger_common
turned_on: *trigger_common
@@ -77,7 +77,12 @@ from .subscription import async_subscription_info

DEFAULT_MODE = MODE_PROD

PLATFORMS = [Platform.BINARY_SENSOR, Platform.STT, Platform.TTS]
PLATFORMS = [
Platform.AI_TASK,
Platform.BINARY_SENSOR,
Platform.STT,
Platform.TTS,
]

SERVICE_REMOTE_CONNECT = "remote_connect"
SERVICE_REMOTE_DISCONNECT = "remote_disconnect"
200 homeassistant/components/cloud/ai_task.py Normal file
@@ -0,0 +1,200 @@
"""AI Task integration for Home Assistant Cloud."""

from __future__ import annotations

import io
from json import JSONDecodeError
import logging

from hass_nabucasa.llm import (
LLMAuthenticationError,
LLMError,
LLMImageAttachment,
LLMRateLimitError,
LLMResponseError,
LLMServiceError,
)
from PIL import Image

from homeassistant.components import ai_task, conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util.json import json_loads

from .const import AI_TASK_ENTITY_UNIQUE_ID, DATA_CLOUD
from .entity import BaseCloudLLMEntity

_LOGGER = logging.getLogger(__name__)


def _convert_image_for_editing(data: bytes) -> tuple[bytes, str]:
"""Ensure the image data is in a format accepted by OpenAI image edits."""
stream = io.BytesIO(data)
with Image.open(stream) as img:
mode = img.mode
if mode not in ("RGBA", "LA", "L"):
img = img.convert("RGBA")

output = io.BytesIO()
if img.mode in ("RGBA", "LA", "L"):
img.save(output, format="PNG")
return output.getvalue(), "image/png"

img.save(output, format=img.format or "PNG")
return output.getvalue(), f"image/{(img.format or 'png').lower()}"


async def async_prepare_image_generation_attachments(
hass: HomeAssistant, attachments: list[conversation.Attachment]
) -> list[LLMImageAttachment]:
"""Load attachment data for image generation."""

def prepare() -> list[LLMImageAttachment]:
items: list[LLMImageAttachment] = []
for attachment in attachments:
if not attachment.mime_type or not attachment.mime_type.startswith(
"image/"
):
raise HomeAssistantError(
"Only image attachments are supported for image generation"
)
path = attachment.path
if not path.exists():
raise HomeAssistantError(f"`{path}` does not exist")

data = path.read_bytes()
mime_type = attachment.mime_type

try:
data, mime_type = _convert_image_for_editing(data)
except HomeAssistantError:
raise
except Exception as err:
raise HomeAssistantError("Failed to process image attachment") from err

items.append(
LLMImageAttachment(
filename=path.name,
mime_type=mime_type,
data=data,
)
)

return items

return await hass.async_add_executor_job(prepare)


async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Home Assistant Cloud AI Task entity."""
cloud = hass.data[DATA_CLOUD]
try:
await cloud.llm.async_ensure_token()
except LLMError:
return

async_add_entities([CloudLLMTaskEntity(cloud, config_entry)])


class CloudLLMTaskEntity(ai_task.AITaskEntity, BaseCloudLLMEntity):
"""Home Assistant Cloud AI Task entity."""

_attr_has_entity_name = True
_attr_supported_features = (
ai_task.AITaskEntityFeature.GENERATE_DATA
| ai_task.AITaskEntityFeature.GENERATE_IMAGE
| ai_task.AITaskEntityFeature.SUPPORT_ATTACHMENTS
)
_attr_translation_key = "cloud_ai"
_attr_unique_id = AI_TASK_ENTITY_UNIQUE_ID

@property
def available(self) -> bool:
"""Return if the entity is available."""
return self._cloud.is_logged_in and self._cloud.valid_subscription

async def _async_generate_data(
self,
task: ai_task.GenDataTask,
chat_log: conversation.ChatLog,
) -> ai_task.GenDataTaskResult:
"""Handle a generate data task."""
await self._async_handle_chat_log(
"ai_task", chat_log, task.name, task.structure
)

if not isinstance(chat_log.content[-1], conversation.AssistantContent):
raise HomeAssistantError(
"Last content in chat log is not an AssistantContent"
)

text = chat_log.content[-1].content or ""

if not task.structure:
return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=text,
)
try:
data = json_loads(text)
except JSONDecodeError as err:
_LOGGER.error(
"Failed to parse JSON response: %s. Response: %s",
err,
text,
)
raise HomeAssistantError("Error with OpenAI structured response") from err

return ai_task.GenDataTaskResult(
conversation_id=chat_log.conversation_id,
data=data,
)

async def _async_generate_image(
self,
task: ai_task.GenImageTask,
chat_log: conversation.ChatLog,
) -> ai_task.GenImageTaskResult:
"""Handle a generate image task."""
attachments: list[LLMImageAttachment] | None = None
if task.attachments:
attachments = await async_prepare_image_generation_attachments(
self.hass, task.attachments
)

try:
if attachments is None:
image = await self._cloud.llm.async_generate_image(
prompt=task.instructions,
)
else:
image = await self._cloud.llm.async_edit_image(
prompt=task.instructions,
attachments=attachments,
)
except LLMAuthenticationError as err:
raise ConfigEntryAuthFailed("Cloud LLM authentication failed") from err
except LLMRateLimitError as err:
raise HomeAssistantError("Cloud LLM is rate limited") from err
except LLMResponseError as err:
raise HomeAssistantError(str(err)) from err
except LLMServiceError as err:
raise HomeAssistantError("Error talking to Cloud LLM") from err
except LLMError as err:
raise HomeAssistantError(str(err)) from err

return ai_task.GenImageTaskResult(
conversation_id=chat_log.conversation_id,
mime_type=image["mime_type"],
image_data=image["image_data"],
model=image.get("model"),
width=image.get("width"),
height=image.get("height"),
revised_prompt=image.get("revised_prompt"),
)
@@ -91,6 +91,7 @@ DISPATCHER_REMOTE_UPDATE: SignalType[Any] = SignalType("cloud_remote_update")

STT_ENTITY_UNIQUE_ID = "cloud-speech-to-text"
TTS_ENTITY_UNIQUE_ID = "cloud-text-to-speech"
AI_TASK_ENTITY_UNIQUE_ID = "cloud-ai-task"

LOGIN_MFA_TIMEOUT = 60
543 homeassistant/components/cloud/entity.py Normal file
@@ -0,0 +1,543 @@
"""Helpers for cloud LLM chat handling."""

import base64
from collections.abc import AsyncGenerator, Callable
from enum import Enum
import json
import logging
import re
from typing import Any, Literal, cast

from hass_nabucasa import Cloud
from hass_nabucasa.llm import (
LLMAuthenticationError,
LLMError,
LLMRateLimitError,
LLMResponseError,
LLMServiceError,
)
from litellm import ResponseFunctionToolCall, ResponsesAPIStreamEvents
from openai.types.responses import (
FunctionToolParam,
ResponseReasoningItem,
ToolParam,
WebSearchToolParam,
)
import voluptuous as vol
from voluptuous_openapi import convert

from homeassistant.components import conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.exceptions import ConfigEntryAuthFailed, HomeAssistantError
from homeassistant.helpers import llm
from homeassistant.helpers.entity import Entity
from homeassistant.util import slugify

from .client import CloudClient

_LOGGER = logging.getLogger(__name__)

_MAX_TOOL_ITERATIONS = 10


class ResponseItemType(str, Enum):
"""Response item types."""

FUNCTION_CALL = "function_call"
MESSAGE = "message"
REASONING = "reasoning"
WEB_SEARCH_CALL = "web_search_call"
IMAGE = "image"


def _convert_content_to_chat_message(
content: conversation.Content,
) -> dict[str, Any] | None:
"""Convert ChatLog content to a responses message."""
if content.role not in ("user", "system", "tool_result", "assistant"):
return None

text_content = cast(
conversation.SystemContent
| conversation.UserContent
| conversation.AssistantContent,
content,
)

if not text_content.content:
return None

content_type = "output_text" if text_content.role == "assistant" else "input_text"

return {
"role": text_content.role,
"content": [
{
"type": content_type,
"text": text_content.content,
}
],
}


def _format_tool(
tool: llm.Tool,
custom_serializer: Callable[[Any], Any] | None,
) -> ToolParam:
"""Format a Home Assistant tool for the OpenAI Responses API."""
parameters = convert(tool.parameters, custom_serializer=custom_serializer)

spec: FunctionToolParam = {
"type": "function",
"name": tool.name,
"strict": False,
"description": tool.description,
"parameters": parameters,
}

return spec


def _adjust_schema(schema: dict[str, Any]) -> None:
"""Adjust the schema to be compatible with OpenAI API."""
if schema["type"] == "object":
schema.setdefault("strict", True)
schema.setdefault("additionalProperties", False)
if "properties" not in schema:
return

if "required" not in schema:
schema["required"] = []

# Ensure all properties are required
for prop, prop_info in schema["properties"].items():
_adjust_schema(prop_info)
if prop not in schema["required"]:
prop_info["type"] = [prop_info["type"], "null"]
schema["required"].append(prop)

elif schema["type"] == "array":
if "items" not in schema:
return

_adjust_schema(schema["items"])


def _format_structured_output(
schema: vol.Schema, llm_api: llm.APIInstance | None
) -> dict[str, Any]:
"""Format the schema to be compatible with OpenAI API."""
result: dict[str, Any] = convert(
schema,
custom_serializer=(
llm_api.custom_serializer if llm_api else llm.selector_serializer
),
)

_ensure_schema_constraints(result)

return result


def _ensure_schema_constraints(schema: dict[str, Any]) -> None:
"""Ensure generated schemas match the Responses API expectations."""
schema_type = schema.get("type")

if schema_type == "object":
schema.setdefault("additionalProperties", False)
properties = schema.get("properties")
if isinstance(properties, dict):
for property_schema in properties.values():
if isinstance(property_schema, dict):
_ensure_schema_constraints(property_schema)
elif schema_type == "array":
items = schema.get("items")
if isinstance(items, dict):
_ensure_schema_constraints(items)


# Borrowed and adapted from openai_conversation component
async def _transform_stream( # noqa: C901 - This is complex, but better to have it in one place
chat_log: conversation.ChatLog,
stream: Any,
remove_citations: bool = False,
) -> AsyncGenerator[
conversation.AssistantContentDeltaDict | conversation.ToolResultContentDeltaDict
]:
"""Transform stream result into HA format."""
last_summary_index = None
last_role: Literal["assistant", "tool_result"] | None = None
current_tool_call: ResponseFunctionToolCall | None = None

# Non-reasoning models don't follow our request to remove citations, so we remove
# them manually here. They always follow the same pattern: the citation is always
# in parentheses in Markdown format, the citation is always in a single delta event,
# and sometimes the closing parenthesis is split into a separate delta event.
remove_parentheses: bool = False
citation_regexp = re.compile(r"\(\[([^\]]+)\]\((https?:\/\/[^\)]+)\)")

async for event in stream:
event_type = getattr(event, "type", None)
event_item = getattr(event, "item", None)
event_item_type = getattr(event_item, "type", None) if event_item else None

_LOGGER.debug(
"Event[%s] | item: %s",
event_type,
event_item_type,
)

if event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_ADDED:
# Detect function_call even when it's a BaseLiteLLMOpenAIResponseObject
if event_item_type == ResponseItemType.FUNCTION_CALL:
# OpenAI has tool calls as individual events
# while HA puts tool calls inside the assistant message.
# We turn them into individual assistant content for HA
# to ensure that tools are called as soon as possible.
yield {"role": "assistant"}
last_role = "assistant"
last_summary_index = None
current_tool_call = cast(ResponseFunctionToolCall, event.item)
elif (
event_item_type == ResponseItemType.MESSAGE
or (
event_item_type == ResponseItemType.REASONING
and last_summary_index is not None
) # Subsequent ResponseReasoningItem
or last_role != "assistant"
):
yield {"role": "assistant"}
last_role = "assistant"
last_summary_index = None

elif event_type == ResponsesAPIStreamEvents.OUTPUT_ITEM_DONE:
if event_item_type == ResponseItemType.REASONING:
encrypted_content = getattr(event.item, "encrypted_content", None)
summary = getattr(event.item, "summary", []) or []

yield {
"native": ResponseReasoningItem(
type="reasoning",
id=event.item.id,
summary=[],
encrypted_content=encrypted_content,
)
}

last_summary_index = len(summary) - 1 if summary else None
elif event_item_type == ResponseItemType.WEB_SEARCH_CALL:
action = getattr(event.item, "action", None)
if isinstance(action, dict):
action_dict = action
elif action is not None:
action_dict = action.to_dict()
else:
action_dict = {}
yield {
"tool_calls": [
llm.ToolInput(
id=event.item.id,
tool_name="web_search_call",
tool_args={"action": action_dict},
external=True,
)
]
}
yield {
"role": "tool_result",
"tool_call_id": event.item.id,
"tool_name": "web_search_call",
"tool_result": {"status": event.item.status},
}
last_role = "tool_result"
elif event_item_type == ResponseItemType.IMAGE:
yield {"native": event.item}
last_summary_index = -1 # Trigger new assistant message on next turn

elif event_type == ResponsesAPIStreamEvents.OUTPUT_TEXT_DELTA:
data = event.delta
if remove_parentheses:
data = data.removeprefix(")")
remove_parentheses = False
elif remove_citations and (match := citation_regexp.search(data)):
match_start, match_end = match.span()
# remove leading space if any
if data[match_start - 1 : match_start] == " ":
match_start -= 1
# remove closing parenthesis:
if data[match_end : match_end + 1] == ")":
match_end += 1
else:
remove_parentheses = True
data = data[:match_start] + data[match_end:]
if data:
yield {"content": data}

elif event_type == ResponsesAPIStreamEvents.REASONING_SUMMARY_TEXT_DELTA:
# OpenAI can output several reasoning summaries
# in a single ResponseReasoningItem. We split them as separate
# AssistantContent messages. Only last of them will have
# the reasoning `native` field set.
if (
last_summary_index is not None
and event.summary_index != last_summary_index
):
yield {"role": "assistant"}
last_role = "assistant"
last_summary_index = event.summary_index
yield {"thinking_content": event.delta}

elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DELTA:
if current_tool_call is not None:
current_tool_call.arguments += event.delta

elif event_type == ResponsesAPIStreamEvents.WEB_SEARCH_CALL_SEARCHING:
yield {"role": "assistant"}

elif event_type == ResponsesAPIStreamEvents.FUNCTION_CALL_ARGUMENTS_DONE:
if current_tool_call is not None:
current_tool_call.status = "completed"

raw_args = json.loads(current_tool_call.arguments)
for key in ("area", "floor"):
if key in raw_args and not raw_args[key]:
# Remove keys that are "" or None
raw_args.pop(key, None)

yield {
"tool_calls": [
llm.ToolInput(
id=current_tool_call.call_id,
tool_name=current_tool_call.name,
tool_args=raw_args,
)
]
}

elif event_type == ResponsesAPIStreamEvents.RESPONSE_COMPLETED:
if event.response.usage is not None:
chat_log.async_trace(
{
"stats": {
"input_tokens": event.response.usage.input_tokens,
"output_tokens": event.response.usage.output_tokens,
}
}
)

elif event_type == ResponsesAPIStreamEvents.RESPONSE_INCOMPLETE:
if event.response.usage is not None:
chat_log.async_trace(
{
"stats": {
"input_tokens": event.response.usage.input_tokens,
"output_tokens": event.response.usage.output_tokens,
}
}
)

if (
event.response.incomplete_details
and event.response.incomplete_details.reason
):
reason: str = event.response.incomplete_details.reason
else:
reason = "unknown reason"

if reason == "max_output_tokens":
reason = "max output tokens reached"
elif reason == "content_filter":
reason = "content filter triggered"

raise HomeAssistantError(f"OpenAI response incomplete: {reason}")

elif event_type == ResponsesAPIStreamEvents.RESPONSE_FAILED:
if event.response.usage is not None:
chat_log.async_trace(
{
"stats": {
"input_tokens": event.response.usage.input_tokens,
"output_tokens": event.response.usage.output_tokens,
}
}
)
|
||||
reason = "unknown reason"
|
||||
if event.response.error is not None:
|
||||
reason = event.response.error.message
|
||||
raise HomeAssistantError(f"OpenAI response failed: {reason}")
|
||||
|
||||
elif event_type == ResponsesAPIStreamEvents.ERROR:
|
||||
raise HomeAssistantError(f"OpenAI response error: {event.message}")
|
||||
|
||||
|
||||
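Illustrative aside (not part of the diff above): a minimal sketch of how the citation regexp behaves on a single text delta, using an invented delta string, once the leading space and the outer closing parenthesis are dropped.

# Sketch only; the sample delta string below is made up for illustration.
import re

citation_regexp = re.compile(r"\(\[([^\]]+)\]\((https?:\/\/[^\)]+)\)")

data = "Paris is the capital ([Wikipedia](https://en.wikipedia.org/wiki/Paris))."
if match := citation_regexp.search(data):
    match_start, match_end = match.span()
    if data[match_start - 1 : match_start] == " ":
        match_start -= 1  # drop the space before the citation
    if data[match_end : match_end + 1] == ")":
        match_end += 1  # the outer closing parenthesis arrived in the same delta
    data = data[:match_start] + data[match_end:]

assert data == "Paris is the capital."
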
class BaseCloudLLMEntity(Entity):
    """Cloud LLM conversation agent."""

    def __init__(self, cloud: Cloud[CloudClient], config_entry: ConfigEntry) -> None:
        """Initialize the entity."""
        self._cloud = cloud
        self._entry = config_entry

    async def _prepare_chat_for_generation(
        self,
        chat_log: conversation.ChatLog,
        response_format: dict[str, Any] | None = None,
    ) -> dict[str, Any]:
        """Prepare kwargs for Cloud LLM from the chat log."""

        messages = [
            message
            for content in chat_log.content
            if (message := _convert_content_to_chat_message(content))
        ]

        if not messages or messages[-1]["role"] != "user":
            raise HomeAssistantError("No user prompt found")

        last_content = chat_log.content[-1]
        if last_content.role == "user" and last_content.attachments:
            files = await self._async_prepare_files_for_prompt(last_content.attachments)
            user_message = messages[-1]
            current_content = user_message.get("content", [])
            user_message["content"] = [*(current_content or []), *files]

        tools: list[ToolParam] = []
        tool_choice: str | None = None

        if chat_log.llm_api:
            ha_tools: list[ToolParam] = [
                _format_tool(tool, chat_log.llm_api.custom_serializer)
                for tool in chat_log.llm_api.tools
            ]

            if ha_tools:
                if not chat_log.unresponded_tool_results:
                    tools = ha_tools
                    tool_choice = "auto"
                else:
                    tools = []
                    tool_choice = "none"

        web_search = WebSearchToolParam(
            type="web_search",
            search_context_size="medium",
        )
        tools.append(web_search)

        response_kwargs: dict[str, Any] = {
            "messages": messages,
            "conversation_id": chat_log.conversation_id,
        }

        if response_format is not None:
            response_kwargs["response_format"] = response_format
        if tools is not None:
            response_kwargs["tools"] = tools
        if tool_choice is not None:
            response_kwargs["tool_choice"] = tool_choice

        response_kwargs["stream"] = True

        return response_kwargs

    async def _async_prepare_files_for_prompt(
        self,
        attachments: list[conversation.Attachment],
    ) -> list[dict[str, Any]]:
        """Prepare files for multimodal prompts."""

        def prepare() -> list[dict[str, Any]]:
            content: list[dict[str, Any]] = []
            for attachment in attachments:
                mime_type = attachment.mime_type
                path = attachment.path
                if not path.exists():
                    raise HomeAssistantError(f"`{path}` does not exist")

                data = base64.b64encode(path.read_bytes()).decode("utf-8")
                if mime_type and mime_type.startswith("image/"):
                    content.append(
                        {
                            "type": "input_image",
                            "image_url": f"data:{mime_type};base64,{data}",
                            "detail": "auto",
                        }
                    )
                elif mime_type and mime_type.startswith("application/pdf"):
                    content.append(
                        {
                            "type": "input_file",
                            "filename": str(path.name),
                            "file_data": f"data:{mime_type};base64,{data}",
                        }
                    )
                else:
                    raise HomeAssistantError(
                        "Only images and PDF are currently supported as attachments"
                    )

            return content

        return await self.hass.async_add_executor_job(prepare)

    async def _async_handle_chat_log(
        self,
        type: Literal["ai_task", "conversation"],
        chat_log: conversation.ChatLog,
        structure_name: str | None = None,
        structure: vol.Schema | None = None,
    ) -> None:
        """Generate a response for the chat log."""

        for _ in range(_MAX_TOOL_ITERATIONS):
            response_format: dict[str, Any] | None = None
            if structure and structure_name:
                response_format = {
                    "type": "json_schema",
                    "json_schema": {
                        "name": slugify(structure_name),
                        "schema": _format_structured_output(
                            structure, chat_log.llm_api
                        ),
                        "strict": True,
                    },
                }

            response_kwargs = await self._prepare_chat_for_generation(
                chat_log,
                response_format,
            )

            try:
                if type == "conversation":
                    raw_stream = await self._cloud.llm.async_process_conversation(
                        **response_kwargs,
                    )
                else:
                    raw_stream = await self._cloud.llm.async_generate_data(
                        **response_kwargs,
                    )

                async for _ in chat_log.async_add_delta_content_stream(
                    agent_id=self.entity_id,
                    stream=_transform_stream(
                        chat_log,
                        raw_stream,
                        True,
                    ),
                ):
                    pass

            except LLMAuthenticationError as err:
                raise ConfigEntryAuthFailed("Cloud LLM authentication failed") from err
            except LLMRateLimitError as err:
                raise HomeAssistantError("Cloud LLM is rate limited") from err
            except LLMResponseError as err:
                raise HomeAssistantError(str(err)) from err
            except LLMServiceError as err:
                raise HomeAssistantError("Error talking to Cloud LLM") from err
            except LLMError as err:
                raise HomeAssistantError(str(err)) from err

            if not chat_log.unresponded_tool_results:
                break
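For reference, a hedged sketch of the response_format payload that _async_handle_chat_log assembles when a structure is supplied; the "shopping_list" name and the schema fields below are invented examples, not values from the integration.

# Hypothetical structured-output payload; names and schema are illustrative only.
response_format = {
    "type": "json_schema",
    "json_schema": {
        "name": "shopping_list",  # slugified structure name (example)
        "schema": {
            "type": "object",
            "properties": {"items": {"type": "array", "items": {"type": "string"}}},
            "required": ["items"],
        },
        "strict": True,
    },
}
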
@@ -1,5 +1,7 @@
"""Helpers for the cloud component."""

from __future__ import annotations

from collections import deque
import logging


@@ -13,6 +13,6 @@
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["acme", "hass_nabucasa", "snitun"],
  "requirements": ["hass-nabucasa==1.5.1"],
  "requirements": ["hass-nabucasa==1.6.1"],
  "single_config_entry": true
}

@@ -1,4 +1,11 @@
{
  "entity": {
    "ai_task": {
      "cloud_ai": {
        "name": "Home Assistant Cloud AI"
      }
    }
  },
  "exceptions": {
    "backup_size_too_large": {
      "message": "The backup size of {size}GB is too large to be uploaded to Home Assistant Cloud."

@@ -7,6 +7,7 @@ from collections.abc import AsyncGenerator, AsyncIterable, Callable, Generator
from contextlib import contextmanager
from contextvars import ContextVar
from dataclasses import asdict, dataclass, field, replace
from datetime import datetime
import logging
from pathlib import Path
from typing import Any, Literal, TypedDict, cast
@@ -16,14 +17,18 @@ import voluptuous as vol
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError, TemplateError
from homeassistant.helpers import chat_session, frame, intent, llm, template
from homeassistant.util.dt import utcnow
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.json import JsonObjectType

from . import trace
from .const import ChatLogEventType
from .models import ConversationInput, ConversationResult

DATA_CHAT_LOGS: HassKey[dict[str, ChatLog]] = HassKey("conversation_chat_logs")

DATA_SUBSCRIPTIONS: HassKey[
    list[Callable[[str, ChatLogEventType, dict[str, Any]], None]]
] = HassKey("conversation_chat_log_subscriptions")
LOGGER = logging.getLogger(__name__)

current_chat_log: ContextVar[ChatLog | None] = ContextVar(
@@ -31,6 +36,40 @@ current_chat_log: ContextVar[ChatLog | None] = ContextVar(
)


@callback
def async_subscribe_chat_logs(
    hass: HomeAssistant,
    callback_func: Callable[[str, ChatLogEventType, dict[str, Any]], None],
) -> Callable[[], None]:
    """Subscribe to all chat logs."""
    subscriptions = hass.data.get(DATA_SUBSCRIPTIONS)
    if subscriptions is None:
        subscriptions = []
        hass.data[DATA_SUBSCRIPTIONS] = subscriptions

    subscriptions.append(callback_func)

    @callback
    def unsubscribe() -> None:
        """Unsubscribe from chat logs."""
        subscriptions.remove(callback_func)

    return unsubscribe


@callback
def _async_notify_subscribers(
    hass: HomeAssistant,
    conversation_id: str,
    event_type: ChatLogEventType,
    data: dict[str, Any],
) -> None:
    """Notify subscribers of a chat log event."""
    if subscriptions := hass.data.get(DATA_SUBSCRIPTIONS):
        for callback_func in subscriptions:
            callback_func(conversation_id, event_type, data)


@contextmanager
def async_get_chat_log(
    hass: HomeAssistant,
@@ -63,6 +102,8 @@ def async_get_chat_log(
        all_chat_logs = {}
        hass.data[DATA_CHAT_LOGS] = all_chat_logs

    is_new_log = session.conversation_id not in all_chat_logs

    if chat_log := all_chat_logs.get(session.conversation_id):
        chat_log = replace(chat_log, content=chat_log.content.copy())
    else:
@@ -71,6 +112,15 @@ def async_get_chat_log(
    if chat_log_delta_listener:
        chat_log.delta_listener = chat_log_delta_listener

    # Fire CREATED event for new chat logs before any content is added
    if is_new_log:
        _async_notify_subscribers(
            hass,
            session.conversation_id,
            ChatLogEventType.CREATED,
            {"chat_log": chat_log.as_dict()},
        )

    if user_input is not None:
        chat_log.async_add_user_content(UserContent(content=user_input.text))

@@ -84,14 +134,28 @@ def async_get_chat_log(
        LOGGER.debug(
            "Chat Log opened but no assistant message was added, ignoring update"
        )
        # If this was a new log but nothing was added, fire DELETED to clean up
        if is_new_log:
            _async_notify_subscribers(
                hass,
                session.conversation_id,
                ChatLogEventType.DELETED,
                {},
            )
        return

    if session.conversation_id not in all_chat_logs:
    if is_new_log:

        @callback
        def do_cleanup() -> None:
            """Handle cleanup."""
            all_chat_logs.pop(session.conversation_id)
            _async_notify_subscribers(
                hass,
                session.conversation_id,
                ChatLogEventType.DELETED,
                {},
            )

        session.async_on_cleanup(do_cleanup)

@@ -100,6 +164,16 @@ def async_get_chat_log(

    all_chat_logs[session.conversation_id] = chat_log

    # For new logs, CREATED was already fired before content was added
    # For existing logs, fire UPDATED
    if not is_new_log:
        _async_notify_subscribers(
            hass,
            session.conversation_id,
            ChatLogEventType.UPDATED,
            {"chat_log": chat_log.as_dict()},
        )


class ConverseError(HomeAssistantError):
    """Error during initialization of conversation.
@@ -129,6 +203,15 @@ class SystemContent:

    role: Literal["system"] = field(init=False, default="system")
    content: str
    created: datetime = field(init=False, default_factory=utcnow)

    def as_dict(self) -> dict[str, Any]:
        """Return a dictionary representation of the content."""
        return {
            "role": self.role,
            "content": self.content,
            "created": self.created,
        }


@dataclass(frozen=True)
@@ -138,6 +221,20 @@ class UserContent:
    role: Literal["user"] = field(init=False, default="user")
    content: str
    attachments: list[Attachment] | None = field(default=None)
    created: datetime = field(init=False, default_factory=utcnow)

    def as_dict(self) -> dict[str, Any]:
        """Return a dictionary representation of the content."""
        result: dict[str, Any] = {
            "role": self.role,
            "content": self.content,
            "created": self.created,
        }
        if self.attachments:
            result["attachments"] = [
                attachment.as_dict() for attachment in self.attachments
            ]
        return result


@dataclass(frozen=True)
@@ -153,6 +250,14 @@ class Attachment:
    path: Path
    """Path to the attachment on disk."""

    def as_dict(self) -> dict[str, Any]:
        """Return a dictionary representation of the attachment."""
        return {
            "media_content_id": self.media_content_id,
            "mime_type": self.mime_type,
            "path": str(self.path),
        }


@dataclass(frozen=True)
class AssistantContent:
@@ -164,6 +269,22 @@ class AssistantContent:
    thinking_content: str | None = None
    tool_calls: list[llm.ToolInput] | None = None
    native: Any = None
    created: datetime = field(init=False, default_factory=utcnow)

    def as_dict(self) -> dict[str, Any]:
        """Return a dictionary representation of the content."""
        result: dict[str, Any] = {
            "role": self.role,
            "agent_id": self.agent_id,
            "created": self.created,
        }
        if self.content:
            result["content"] = self.content
        if self.thinking_content:
            result["thinking_content"] = self.thinking_content
        if self.tool_calls:
            result["tool_calls"] = self.tool_calls
        return result


@dataclass(frozen=True)
@@ -175,6 +296,18 @@ class ToolResultContent:
    tool_call_id: str
    tool_name: str
    tool_result: JsonObjectType
    created: datetime = field(init=False, default_factory=utcnow)

    def as_dict(self) -> dict[str, Any]:
        """Return a dictionary representation of the content."""
        return {
            "role": self.role,
            "agent_id": self.agent_id,
            "tool_call_id": self.tool_call_id,
            "tool_name": self.tool_name,
            "tool_result": self.tool_result,
            "created": self.created,
        }


type Content = SystemContent | UserContent | AssistantContent | ToolResultContent
@@ -210,6 +343,16 @@ class ChatLog:
    llm_api: llm.APIInstance | None = None
    delta_listener: Callable[[ChatLog, dict], None] | None = None
    llm_input_provided_index = 0
    created: datetime = field(init=False, default_factory=utcnow)

    def as_dict(self) -> dict[str, Any]:
        """Return a dictionary representation of the chat log."""
        return {
            "conversation_id": self.conversation_id,
            "continue_conversation": self.continue_conversation,
            "content": [c.as_dict() for c in self.content],
            "created": self.created,
        }

    @property
    def continue_conversation(self) -> bool:
@@ -241,6 +384,12 @@ class ChatLog:
        """Add user content to the log."""
        LOGGER.debug("Adding user content: %s", content)
        self.content.append(content)
        _async_notify_subscribers(
            self.hass,
            self.conversation_id,
            ChatLogEventType.CONTENT_ADDED,
            {"content": content.as_dict()},
        )

    @callback
    def async_add_assistant_content_without_tools(
@@ -259,6 +408,12 @@ class ChatLog:
        ):
            raise ValueError("Non-external tool calls not allowed")
        self.content.append(content)
        _async_notify_subscribers(
            self.hass,
            self.conversation_id,
            ChatLogEventType.CONTENT_ADDED,
            {"content": content.as_dict()},
        )

    async def async_add_assistant_content(
        self,
@@ -317,6 +472,14 @@ class ChatLog:
                tool_result=tool_result,
            )
            self.content.append(response_content)
            _async_notify_subscribers(
                self.hass,
                self.conversation_id,
                ChatLogEventType.CONTENT_ADDED,
                {
                    "content": response_content.as_dict(),
                },
            )
            yield response_content

    async def async_add_delta_content_stream(
@@ -593,6 +756,12 @@ class ChatLog:
        self.llm_api = llm_api
        self.extra_system_prompt = extra_system_prompt
        self.content[0] = SystemContent(content=prompt)
        _async_notify_subscribers(
            self.hass,
            self.conversation_id,
            ChatLogEventType.UPDATED,
            {"chat_log": self.as_dict()},
        )

        LOGGER.debug("Prompt: %s", self.content)
        LOGGER.debug("Tools: %s", self.llm_api.tools if self.llm_api else None)

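A minimal usage sketch of the subscription helper added above, assuming a caller that only wants to log events; the callback and the bare `hass` reference below are invented for illustration and are not part of the diff.

# Sketch only: subscribe to chat log events and log them until unsubscribed.
# `hass` is assumed to be an existing HomeAssistant instance.
@callback
def _log_chat_event(
    conversation_id: str, event_type: ChatLogEventType, data: dict[str, Any]
) -> None:
    LOGGER.debug("chat_log %s: %s %s", conversation_id, event_type, data)

unsubscribe = async_subscribe_chat_logs(hass, _log_chat_event)
# ... later, e.g. when the listener is torn down:
unsubscribe()
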
@@ -2,7 +2,7 @@

from __future__ import annotations

from enum import IntFlag
from enum import IntFlag, StrEnum
from typing import TYPE_CHECKING

from homeassistant.util.hass_dict import HassKey
@@ -34,3 +34,13 @@ class ConversationEntityFeature(IntFlag):

METADATA_CUSTOM_SENTENCE = "hass_custom_sentence"
METADATA_CUSTOM_FILE = "hass_custom_file"


class ChatLogEventType(StrEnum):
    """Chat log event type."""

    INITIAL_STATE = "initial_state"
    CREATED = "created"
    UPDATED = "updated"
    DELETED = "deleted"
    CONTENT_ADDED = "content_added"

@@ -12,6 +12,7 @@ from homeassistant.components import http, websocket_api
|
||||
from homeassistant.components.http.data_validator import RequestDataValidator
|
||||
from homeassistant.const import MATCH_ALL
|
||||
from homeassistant.core import HomeAssistant, callback
|
||||
from homeassistant.helpers.chat_session import async_get_chat_session
|
||||
from homeassistant.util import language as language_util
|
||||
|
||||
from .agent_manager import (
|
||||
@@ -20,7 +21,8 @@ from .agent_manager import (
|
||||
async_get_agent,
|
||||
get_agent_manager,
|
||||
)
|
||||
from .const import DATA_COMPONENT
|
||||
from .chat_log import DATA_CHAT_LOGS, async_get_chat_log, async_subscribe_chat_logs
|
||||
from .const import DATA_COMPONENT, ChatLogEventType
|
||||
from .entity import ConversationEntity
|
||||
from .models import ConversationInput
|
||||
|
||||
@@ -35,6 +37,8 @@ def async_setup(hass: HomeAssistant) -> None:
|
||||
websocket_api.async_register_command(hass, websocket_list_sentences)
|
||||
websocket_api.async_register_command(hass, websocket_hass_agent_debug)
|
||||
websocket_api.async_register_command(hass, websocket_hass_agent_language_scores)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_chat_log)
|
||||
websocket_api.async_register_command(hass, websocket_subscribe_chat_log_index)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
@@ -265,3 +269,114 @@ class ConversationProcessView(http.HomeAssistantView):
|
||||
)
|
||||
|
||||
return self.json(result.as_dict())
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "conversation/chat_log/subscribe",
|
||||
vol.Required("conversation_id"): str,
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
def websocket_subscribe_chat_log(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Subscribe to a chat log."""
|
||||
msg_id = msg["id"]
|
||||
subscribed_conversation = msg["conversation_id"]
|
||||
|
||||
chat_logs = hass.data.get(DATA_CHAT_LOGS)
|
||||
|
||||
if not chat_logs or subscribed_conversation not in chat_logs:
|
||||
connection.send_error(
|
||||
msg_id,
|
||||
websocket_api.ERR_NOT_FOUND,
|
||||
"Conversation chat log not found",
|
||||
)
|
||||
return
|
||||
|
||||
@callback
|
||||
def forward_events(conversation_id: str, event_type: str, data: dict) -> None:
|
||||
"""Forward chat log events to websocket connection."""
|
||||
if conversation_id != subscribed_conversation:
|
||||
return
|
||||
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"conversation_id": conversation_id,
|
||||
"event_type": event_type,
|
||||
"data": data,
|
||||
},
|
||||
)
|
||||
|
||||
if event_type == ChatLogEventType.DELETED:
|
||||
unsubscribe()
|
||||
del connection.subscriptions[msg_id]
|
||||
|
||||
unsubscribe = async_subscribe_chat_logs(hass, forward_events)
|
||||
connection.subscriptions[msg_id] = unsubscribe
|
||||
connection.send_result(msg_id)
|
||||
|
||||
with (
|
||||
async_get_chat_session(hass, subscribed_conversation) as session,
|
||||
async_get_chat_log(hass, session) as chat_log,
|
||||
):
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"event_type": ChatLogEventType.INITIAL_STATE,
|
||||
"data": chat_log.as_dict(),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
@websocket_api.websocket_command(
|
||||
{
|
||||
vol.Required("type"): "conversation/chat_log/subscribe_index",
|
||||
}
|
||||
)
|
||||
@websocket_api.require_admin
|
||||
def websocket_subscribe_chat_log_index(
|
||||
hass: HomeAssistant,
|
||||
connection: websocket_api.ActiveConnection,
|
||||
msg: dict[str, Any],
|
||||
) -> None:
|
||||
"""Subscribe to a chat log."""
|
||||
msg_id = msg["id"]
|
||||
|
||||
@callback
|
||||
def forward_events(
|
||||
conversation_id: str, event_type: ChatLogEventType, data: dict
|
||||
) -> None:
|
||||
"""Forward chat log events to websocket connection."""
|
||||
if event_type not in (ChatLogEventType.CREATED, ChatLogEventType.DELETED):
|
||||
return
|
||||
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"conversation_id": conversation_id,
|
||||
"event_type": event_type,
|
||||
"data": data,
|
||||
},
|
||||
)
|
||||
|
||||
unsubscribe = async_subscribe_chat_logs(hass, forward_events)
|
||||
connection.subscriptions[msg["id"]] = unsubscribe
|
||||
connection.send_result(msg["id"])
|
||||
|
||||
chat_logs = hass.data.get(DATA_CHAT_LOGS)
|
||||
|
||||
if not chat_logs:
|
||||
return
|
||||
|
||||
connection.send_event(
|
||||
msg_id,
|
||||
{
|
||||
"event_type": ChatLogEventType.INITIAL_STATE,
|
||||
"data": [c.as_dict() for c in chat_logs.values()],
|
||||
},
|
||||
)
|
||||
|
||||
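As a hedged illustration of the two commands registered above, these are the WebSocket messages a client could send, shown here as Python dicts; the id values and the conversation_id are arbitrary placeholders.

# Example payloads only; ids and the conversation_id are made up.
subscribe_single_chat_log = {
    "id": 1,
    "type": "conversation/chat_log/subscribe",
    "conversation_id": "example-conversation-id",
}
subscribe_chat_log_index = {
    "id": 2,
    "type": "conversation/chat_log/subscribe_index",
}
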
@@ -6,5 +6,5 @@
  "documentation": "https://www.home-assistant.io/integrations/conversation",
  "integration_type": "entity",
  "quality_scale": "internal",
  "requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.7"]
  "requirements": ["hassil==3.4.0", "home-assistant-intents==2025.11.24"]
}

@@ -108,5 +108,34 @@
|
||||
"toggle_cover_tilt": {
|
||||
"service": "mdi:arrow-top-right-bottom-left"
|
||||
}
|
||||
},
|
||||
"triggers": {
|
||||
"awning_opened": {
|
||||
"trigger": "mdi:awning-outline"
|
||||
},
|
||||
"blind_opened": {
|
||||
"trigger": "mdi:blinds-horizontal"
|
||||
},
|
||||
"curtain_opened": {
|
||||
"trigger": "mdi:curtains"
|
||||
},
|
||||
"door_opened": {
|
||||
"trigger": "mdi:door-open"
|
||||
},
|
||||
"garage_opened": {
|
||||
"trigger": "mdi:garage-open"
|
||||
},
|
||||
"gate_opened": {
|
||||
"trigger": "mdi:gate-open"
|
||||
},
|
||||
"shade_opened": {
|
||||
"trigger": "mdi:roller-shade"
|
||||
},
|
||||
"shutter_opened": {
|
||||
"trigger": "mdi:window-shutter-open"
|
||||
},
|
||||
"window_opened": {
|
||||
"trigger": "mdi:window-open"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,16 @@
|
||||
{
|
||||
"common": {
|
||||
"trigger_behavior_description_awning": "The behavior of the targeted awnings to trigger on.",
|
||||
"trigger_behavior_description_blind": "The behavior of the targeted blinds to trigger on.",
|
||||
"trigger_behavior_description_curtain": "The behavior of the targeted curtains to trigger on.",
|
||||
"trigger_behavior_description_door": "The behavior of the targeted doors to trigger on.",
|
||||
"trigger_behavior_description_garage": "The behavior of the targeted garage doors to trigger on.",
|
||||
"trigger_behavior_description_gate": "The behavior of the targeted gates to trigger on.",
|
||||
"trigger_behavior_description_shade": "The behavior of the targeted shades to trigger on.",
|
||||
"trigger_behavior_description_shutter": "The behavior of the targeted shutters to trigger on.",
|
||||
"trigger_behavior_description_window": "The behavior of the targeted windows to trigger on.",
|
||||
"trigger_behavior_name": "Behavior"
|
||||
},
|
||||
"device_automation": {
|
||||
"action_type": {
|
||||
"close": "Close {entity_name}",
|
||||
@@ -82,6 +94,15 @@
|
||||
"name": "Window"
|
||||
}
|
||||
},
|
||||
"selector": {
|
||||
"trigger_behavior": {
|
||||
"options": {
|
||||
"any": "Any",
|
||||
"first": "First",
|
||||
"last": "Last"
|
||||
}
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"close_cover": {
|
||||
"description": "Closes a cover.",
|
||||
@@ -136,5 +157,142 @@
|
||||
"name": "Toggle tilt"
|
||||
}
|
||||
},
|
||||
"title": "Cover"
|
||||
"title": "Cover",
|
||||
"triggers": {
|
||||
"awning_opened": {
|
||||
"description": "Triggers when an awning opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::awning_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_awning%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the awnings to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When an awning opens"
|
||||
},
|
||||
"blind_opened": {
|
||||
"description": "Triggers when a blind opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::blind_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_blind%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the blinds to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a blind opens"
|
||||
},
|
||||
"curtain_opened": {
|
||||
"description": "Triggers when a curtain opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::curtain_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_curtain%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the curtains to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a curtain opens"
|
||||
},
|
||||
"door_opened": {
|
||||
"description": "Triggers when a door opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::door_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_door%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the doors to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a door opens"
|
||||
},
|
||||
"garage_opened": {
|
||||
"description": "Triggers when a garage door opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::garage_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_garage%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the garage doors to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a garage door opens"
|
||||
},
|
||||
"gate_opened": {
|
||||
"description": "Triggers when a gate opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::gate_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_gate%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the gates to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a gate opens"
|
||||
},
|
||||
"shade_opened": {
|
||||
"description": "Triggers when a shade opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::shade_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_shade%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the shades to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a shade opens"
|
||||
},
|
||||
"shutter_opened": {
|
||||
"description": "Triggers when a shutter opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::shutter_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_shutter%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the shutters to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a shutter opens"
|
||||
},
|
||||
"window_opened": {
|
||||
"description": "Triggers when a window opens.",
|
||||
"description_configured": "[%key:component::cover::triggers::window_opened::description%]",
|
||||
"fields": {
|
||||
"behavior": {
|
||||
"description": "[%key:component::cover::common::trigger_behavior_description_window%]",
|
||||
"name": "[%key:component::cover::common::trigger_behavior_name%]"
|
||||
},
|
||||
"fully_opened": {
|
||||
"description": "Require the windows to be fully opened before triggering.",
|
||||
"name": "Fully opened"
|
||||
}
|
||||
},
|
||||
"name": "When a window opens"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
homeassistant/components/cover/trigger.py (new file, 116 lines)
@@ -0,0 +1,116 @@
|
||||
"""Provides triggers for covers."""
|
||||
|
||||
from typing import Final
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.const import CONF_OPTIONS
|
||||
from homeassistant.core import HomeAssistant, State
|
||||
from homeassistant.exceptions import HomeAssistantError
|
||||
from homeassistant.helpers.entity import get_device_class
|
||||
from homeassistant.helpers.trigger import (
|
||||
ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST,
|
||||
EntityTriggerBase,
|
||||
Trigger,
|
||||
TriggerConfig,
|
||||
)
|
||||
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
|
||||
|
||||
from . import ATTR_CURRENT_POSITION, CoverDeviceClass, CoverState
|
||||
from .const import DOMAIN
|
||||
|
||||
ATTR_FULLY_OPENED: Final = "fully_opened"
|
||||
|
||||
COVER_OPENED_TRIGGER_SCHEMA = ENTITY_STATE_TRIGGER_SCHEMA_FIRST_LAST.extend(
|
||||
{
|
||||
vol.Required(CONF_OPTIONS): {
|
||||
vol.Required(ATTR_FULLY_OPENED, default=False): bool,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def get_device_class_or_undefined(
|
||||
hass: HomeAssistant, entity_id: str
|
||||
) -> str | None | UndefinedType:
|
||||
"""Get the device class of an entity or UNDEFINED if not found."""
|
||||
try:
|
||||
return get_device_class(hass, entity_id)
|
||||
except HomeAssistantError:
|
||||
return UNDEFINED
|
||||
|
||||
|
||||
class CoverOpenedClosedTrigger(EntityTriggerBase):
|
||||
"""Class for cover opened and closed triggers."""
|
||||
|
||||
_attribute: str = ATTR_CURRENT_POSITION
|
||||
_attribute_value: int | None = None
|
||||
_device_class: CoverDeviceClass | None
|
||||
_domain: str = DOMAIN
|
||||
_to_states: set[str]
|
||||
|
||||
def is_to_state(self, state: State) -> bool:
|
||||
"""Check if the state matches the target state."""
|
||||
if state.state not in self._to_states:
|
||||
return False
|
||||
if (
|
||||
self._attribute_value is not None
|
||||
and (value := state.attributes.get(self._attribute)) is not None
|
||||
and value != self._attribute_value
|
||||
):
|
||||
return False
|
||||
return True
|
||||
|
||||
def entity_filter(self, entities: set[str]) -> set[str]:
|
||||
"""Filter entities of this domain."""
|
||||
entities = super().entity_filter(entities)
|
||||
return {
|
||||
entity_id
|
||||
for entity_id in entities
|
||||
if get_device_class_or_undefined(self._hass, entity_id)
|
||||
== self._device_class
|
||||
}
|
||||
|
||||
|
||||
class CoverOpenedTrigger(CoverOpenedClosedTrigger):
|
||||
"""Class for cover opened triggers."""
|
||||
|
||||
_schema = COVER_OPENED_TRIGGER_SCHEMA
|
||||
_to_states = {CoverState.OPEN, CoverState.OPENING}
|
||||
|
||||
def __init__(self, hass: HomeAssistant, config: TriggerConfig) -> None:
|
||||
"""Initialize the state trigger."""
|
||||
super().__init__(hass, config)
|
||||
if self._options.get(ATTR_FULLY_OPENED):
|
||||
self._attribute_value = 100
|
||||
|
||||
|
||||
def make_cover_opened_trigger(
|
||||
device_class: CoverDeviceClass | None,
|
||||
) -> type[CoverOpenedTrigger]:
|
||||
"""Create an entity state attribute trigger class."""
|
||||
|
||||
class CustomTrigger(CoverOpenedTrigger):
|
||||
"""Trigger for entity state changes."""
|
||||
|
||||
_device_class = device_class
|
||||
|
||||
return CustomTrigger
|
||||
|
||||
|
||||
TRIGGERS: dict[str, type[Trigger]] = {
|
||||
"awning_opened": make_cover_opened_trigger(CoverDeviceClass.AWNING),
|
||||
"blind_opened": make_cover_opened_trigger(CoverDeviceClass.BLIND),
|
||||
"curtain_opened": make_cover_opened_trigger(CoverDeviceClass.CURTAIN),
|
||||
"door_opened": make_cover_opened_trigger(CoverDeviceClass.DOOR),
|
||||
"garage_opened": make_cover_opened_trigger(CoverDeviceClass.GARAGE),
|
||||
"gate_opened": make_cover_opened_trigger(CoverDeviceClass.GATE),
|
||||
"shade_opened": make_cover_opened_trigger(CoverDeviceClass.SHADE),
|
||||
"shutter_opened": make_cover_opened_trigger(CoverDeviceClass.SHUTTER),
|
||||
"window_opened": make_cover_opened_trigger(CoverDeviceClass.WINDOW),
|
||||
}
|
||||
|
||||
|
||||
async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
|
||||
"""Return the triggers for covers."""
|
||||
return TRIGGERS
|
||||
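A small sketch, assuming the module above is imported, of what the trigger factory produces: each entry in TRIGGERS is a distinct CoverOpenedTrigger subclass bound to one device class.

# Sketch only: the factory returns a fresh subclass per device class.
window_trigger_cls = make_cover_opened_trigger(CoverDeviceClass.WINDOW)
assert issubclass(window_trigger_cls, CoverOpenedTrigger)
assert window_trigger_cls is not TRIGGERS["window_opened"]  # separate factory calls yield separate classes
assert TRIGGERS["window_opened"]._to_states == {CoverState.OPEN, CoverState.OPENING}
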
homeassistant/components/cover/triggers.yaml (new file, 79 lines)
@@ -0,0 +1,79 @@
|
||||
.trigger_common_fields: &trigger_common_fields
|
||||
behavior:
|
||||
required: true
|
||||
default: any
|
||||
selector:
|
||||
select:
|
||||
translation_key: trigger_behavior
|
||||
options:
|
||||
- first
|
||||
- last
|
||||
- any
|
||||
fully_opened:
|
||||
required: true
|
||||
default: false
|
||||
selector:
|
||||
boolean:
|
||||
|
||||
awning_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: awning
|
||||
|
||||
blind_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: blind
|
||||
|
||||
curtain_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: curtain
|
||||
|
||||
door_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: door
|
||||
|
||||
garage_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: garage
|
||||
|
||||
gate_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: gate
|
||||
|
||||
shade_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: shade
|
||||
|
||||
shutter_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: shutter
|
||||
|
||||
window_opened:
|
||||
fields: *trigger_common_fields
|
||||
target:
|
||||
entity:
|
||||
domain: cover
|
||||
device_class: window
|
||||
@@ -6,5 +6,5 @@
  "integration_type": "service",
  "iot_class": "local_push",
  "quality_scale": "internal",
  "requirements": ["debugpy==1.8.16"]
  "requirements": ["debugpy==1.8.17"]
}

@@ -10,6 +10,7 @@ from typing import Any, cast
|
||||
from aiohttp import ClientSession
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
|
||||
from homeassistant.core import (
|
||||
CALLBACK_TYPE,
|
||||
@@ -18,13 +19,17 @@ from homeassistant.core import (
|
||||
ServiceCall,
|
||||
callback,
|
||||
)
|
||||
from homeassistant.exceptions import ServiceValidationError
|
||||
from homeassistant.helpers import config_validation as cv
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.event import async_call_later
|
||||
from homeassistant.helpers.selector import ConfigEntrySelector
|
||||
from homeassistant.helpers.typing import ConfigType
|
||||
from homeassistant.loader import bind_hass
|
||||
from homeassistant.util import dt as dt_util
|
||||
|
||||
from .const import ATTR_CONFIG_ENTRY
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
ATTR_TXT = "txt"
|
||||
@@ -32,7 +37,13 @@ ATTR_TXT = "txt"
|
||||
DOMAIN = "duckdns"
|
||||
|
||||
INTERVAL = timedelta(minutes=5)
|
||||
|
||||
BACKOFF_INTERVALS = (
|
||||
INTERVAL,
|
||||
timedelta(minutes=1),
|
||||
timedelta(minutes=5),
|
||||
timedelta(minutes=15),
|
||||
timedelta(minutes=30),
|
||||
)
|
||||
SERVICE_SET_TXT = "set_txt"
|
||||
|
||||
UPDATE_URL = "https://www.duckdns.org/update"
|
||||
@@ -49,39 +60,112 @@ CONFIG_SCHEMA = vol.Schema(
|
||||
extra=vol.ALLOW_EXTRA,
|
||||
)
|
||||
|
||||
SERVICE_TXT_SCHEMA = vol.Schema({vol.Required(ATTR_TXT): vol.Any(None, cv.string)})
|
||||
SERVICE_TXT_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Optional(ATTR_CONFIG_ENTRY): ConfigEntrySelector(
|
||||
{
|
||||
"integration": DOMAIN,
|
||||
}
|
||||
),
|
||||
vol.Optional(ATTR_TXT): vol.Any(None, cv.string),
|
||||
}
|
||||
)
|
||||
|
||||
type DuckDnsConfigEntry = ConfigEntry
|
||||
|
||||
|
||||
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
|
||||
"""Initialize the DuckDNS component."""
|
||||
domain: str = config[DOMAIN][CONF_DOMAIN]
|
||||
token: str = config[DOMAIN][CONF_ACCESS_TOKEN]
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN,
|
||||
SERVICE_SET_TXT,
|
||||
update_domain_service,
|
||||
schema=SERVICE_TXT_SCHEMA,
|
||||
)
|
||||
|
||||
if DOMAIN not in config:
|
||||
return True
|
||||
|
||||
hass.async_create_task(
|
||||
hass.config_entries.flow.async_init(
|
||||
DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
async def async_setup_entry(hass: HomeAssistant, entry: DuckDnsConfigEntry) -> bool:
|
||||
"""Set up Duck DNS from a config entry."""
|
||||
|
||||
session = async_get_clientsession(hass)
|
||||
|
||||
async def update_domain_interval(_now: datetime) -> bool:
|
||||
"""Update the DuckDNS entry."""
|
||||
return await _update_duckdns(session, domain, token)
|
||||
return await _update_duckdns(
|
||||
session,
|
||||
entry.data[CONF_DOMAIN],
|
||||
entry.data[CONF_ACCESS_TOKEN],
|
||||
)
|
||||
|
||||
intervals = (
|
||||
INTERVAL,
|
||||
timedelta(minutes=1),
|
||||
timedelta(minutes=5),
|
||||
timedelta(minutes=15),
|
||||
timedelta(minutes=30),
|
||||
)
|
||||
async_track_time_interval_backoff(hass, update_domain_interval, intervals)
|
||||
|
||||
async def update_domain_service(call: ServiceCall) -> None:
|
||||
"""Update the DuckDNS entry."""
|
||||
await _update_duckdns(session, domain, token, txt=call.data[ATTR_TXT])
|
||||
|
||||
hass.services.async_register(
|
||||
DOMAIN, SERVICE_SET_TXT, update_domain_service, schema=SERVICE_TXT_SCHEMA
|
||||
entry.async_on_unload(
|
||||
async_track_time_interval_backoff(
|
||||
hass, update_domain_interval, BACKOFF_INTERVALS
|
||||
)
|
||||
)
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def get_config_entry(
|
||||
hass: HomeAssistant, entry_id: str | None = None
|
||||
) -> DuckDnsConfigEntry:
|
||||
"""Return config entry or raise if not found or not loaded."""
|
||||
|
||||
if entry_id is None:
|
||||
if not (config_entries := hass.config_entries.async_entries(DOMAIN)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_found",
|
||||
)
|
||||
|
||||
if len(config_entries) != 1:
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_selected",
|
||||
)
|
||||
return config_entries[0]
|
||||
|
||||
if not (entry := hass.config_entries.async_get_entry(entry_id)):
|
||||
raise ServiceValidationError(
|
||||
translation_domain=DOMAIN,
|
||||
translation_key="entry_not_found",
|
||||
)
|
||||
|
||||
return entry
|
||||
|
||||
|
||||
async def update_domain_service(call: ServiceCall) -> None:
|
||||
"""Update the DuckDNS entry."""
|
||||
|
||||
entry = get_config_entry(call.hass, call.data.get(ATTR_CONFIG_ENTRY))
|
||||
|
||||
session = async_get_clientsession(call.hass)
|
||||
|
||||
await _update_duckdns(
|
||||
session,
|
||||
entry.data[CONF_DOMAIN],
|
||||
entry.data[CONF_ACCESS_TOKEN],
|
||||
txt=call.data.get(ATTR_TXT),
|
||||
)
|
||||
|
||||
|
||||
async def async_unload_entry(hass: HomeAssistant, entry: DuckDnsConfigEntry) -> bool:
|
||||
"""Unload a config entry."""
|
||||
return True
|
||||
|
||||
|
||||
_SENTINEL = object()
|
||||
|
||||
|
||||
|
||||
homeassistant/components/duckdns/config_flow.py (new file, 81 lines)
@@ -0,0 +1,81 @@
|
||||
"""Config flow for the Duck DNS integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
import voluptuous as vol
|
||||
|
||||
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
|
||||
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_DOMAIN
|
||||
from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
from homeassistant.helpers.selector import (
|
||||
TextSelector,
|
||||
TextSelectorConfig,
|
||||
TextSelectorType,
|
||||
)
|
||||
|
||||
from . import _update_duckdns
|
||||
from .const import DOMAIN
|
||||
from .issue import deprecate_yaml_issue
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
STEP_USER_DATA_SCHEMA = vol.Schema(
|
||||
{
|
||||
vol.Required(CONF_DOMAIN): TextSelector(
|
||||
TextSelectorConfig(type=TextSelectorType.TEXT, suffix=".duckdns.org")
|
||||
),
|
||||
vol.Required(CONF_ACCESS_TOKEN): str,
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
class DuckDnsConfigFlow(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow for Duck DNS."""
|
||||
|
||||
async def async_step_user(
|
||||
self, user_input: dict[str, Any] | None = None
|
||||
) -> ConfigFlowResult:
|
||||
"""Handle the initial step."""
|
||||
errors: dict[str, str] = {}
|
||||
if user_input is not None:
|
||||
self._async_abort_entries_match({CONF_DOMAIN: user_input[CONF_DOMAIN]})
|
||||
session = async_get_clientsession(self.hass)
|
||||
try:
|
||||
if not await _update_duckdns(
|
||||
session,
|
||||
user_input[CONF_DOMAIN],
|
||||
user_input[CONF_ACCESS_TOKEN],
|
||||
):
|
||||
errors["base"] = "update_failed"
|
||||
except Exception:
|
||||
_LOGGER.exception("Unexpected exception")
|
||||
errors["base"] = "unknown"
|
||||
|
||||
if not errors:
|
||||
return self.async_create_entry(
|
||||
title=f"{user_input[CONF_DOMAIN]}.duckdns.org", data=user_input
|
||||
)
|
||||
|
||||
return self.async_show_form(
|
||||
step_id="user",
|
||||
data_schema=self.add_suggested_values_to_schema(
|
||||
data_schema=STEP_USER_DATA_SCHEMA, suggested_values=user_input
|
||||
),
|
||||
errors=errors,
|
||||
description_placeholders={"url": "https://www.duckdns.org/"},
|
||||
)
|
||||
|
||||
async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult:
|
||||
"""Import config from yaml."""
|
||||
|
||||
self._async_abort_entries_match({CONF_DOMAIN: import_info[CONF_DOMAIN]})
|
||||
result = await self.async_step_user(import_info)
|
||||
if errors := result.get("errors"):
|
||||
deprecate_yaml_issue(self.hass, import_success=False)
|
||||
return self.async_abort(reason=errors["base"])
|
||||
|
||||
deprecate_yaml_issue(self.hass, import_success=True)
|
||||
return result
|
||||
homeassistant/components/duckdns/const.py (new file, 7 lines)
@@ -0,0 +1,7 @@
"""Constants for the Duck DNS integration."""

from typing import Final

DOMAIN = "duckdns"

ATTR_CONFIG_ENTRY: Final = "config_entry_id"
homeassistant/components/duckdns/issue.py (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
"""Issues for Duck DNS integration."""
|
||||
|
||||
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
|
||||
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
|
||||
@callback
|
||||
def deprecate_yaml_issue(hass: HomeAssistant, *, import_success: bool) -> None:
|
||||
"""Deprecate yaml issue."""
|
||||
if import_success:
|
||||
async_create_issue(
|
||||
hass,
|
||||
HOMEASSISTANT_DOMAIN,
|
||||
f"deprecated_yaml_{DOMAIN}",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
breaks_in_ha_version="2026.6.0",
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml",
|
||||
translation_placeholders={
|
||||
"domain": DOMAIN,
|
||||
"integration_title": "Duck DNS",
|
||||
},
|
||||
)
|
||||
else:
|
||||
async_create_issue(
|
||||
hass,
|
||||
DOMAIN,
|
||||
"deprecated_yaml_import_issue_error",
|
||||
breaks_in_ha_version="2026.6.0",
|
||||
is_fixable=False,
|
||||
issue_domain=DOMAIN,
|
||||
severity=IssueSeverity.WARNING,
|
||||
translation_key="deprecated_yaml_import_issue_error",
|
||||
translation_placeholders={
|
||||
"url": "/config/integrations/dashboard/add?domain=duckdns"
|
||||
},
|
||||
)
|
||||
@@ -1,8 +1,8 @@
{
  "domain": "duckdns",
  "name": "Duck DNS",
  "codeowners": [],
  "codeowners": ["@tr4nt0r"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/duckdns",
  "iot_class": "cloud_polling",
  "quality_scale": "legacy"
  "iot_class": "cloud_polling"
}

@@ -1,7 +1,10 @@
set_txt:
  fields:
    config_entry_id:
      selector:
        config_entry:
          integration: duckdns
    txt:
      required: true
      example: "This domain name is reserved for use in documentation"
      selector:
        text:

@@ -1,8 +1,48 @@
|
||||
{
|
||||
"config": {
|
||||
"abort": {
|
||||
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]"
|
||||
},
|
||||
"error": {
|
||||
"unknown": "[%key:common::config_flow::error::unknown%]",
|
||||
"update_failed": "Updating Duck DNS failed"
|
||||
},
|
||||
"step": {
|
||||
"user": {
|
||||
"data": {
|
||||
"access_token": "Token",
|
||||
"domain": "Subdomain"
|
||||
},
|
||||
"data_description": {
|
||||
"access_token": "Your Duck DNS account token",
|
||||
"domain": "The Duck DNS subdomain to update"
|
||||
},
|
||||
"description": "Enter your Duck DNS subdomain and token below to configure dynamic DNS updates. You can find your token on the [Duck DNS]({url}) homepage after logging into your account."
|
||||
}
|
||||
}
|
||||
},
|
||||
"exceptions": {
|
||||
"entry_not_found": {
|
||||
"message": "Duck DNS integration entry not found"
|
||||
},
|
||||
"entry_not_selected": {
|
||||
"message": "Duck DNS integration entry not selected"
|
||||
}
|
||||
},
|
||||
"issues": {
|
||||
"deprecated_yaml_import_issue_error": {
|
||||
"description": "Configuring Duck DNS using YAML is being removed but there was an error when trying to import the YAML configuration.\n\nEnsure the YAML configuration is correct and restart Home Assistant to try again or remove the Duck DNS YAML configuration from your `configuration.yaml` file and continue to [set up the integration]({url}) manually.",
|
||||
"title": "The Duck DNS YAML configuration import failed"
|
||||
}
|
||||
},
|
||||
"services": {
|
||||
"set_txt": {
|
||||
"description": "Sets the TXT record of your DuckDNS subdomain.",
|
||||
"description": "Sets the TXT record of your Duck DNS subdomain.",
|
||||
"fields": {
|
||||
"config_entry_id": {
|
||||
"description": "The Duck DNS integration ID.",
|
||||
"name": "Integration ID"
|
||||
},
|
||||
"txt": {
|
||||
"description": "Payload for the TXT record.",
|
||||
"name": "TXT"
|
||||
|
||||
@@ -278,11 +278,18 @@ async def async_setup_entry(hass: HomeAssistant, entry: ElkM1ConfigEntry) -> boo
|
||||
for keypad in elk.keypads:
|
||||
keypad.add_callback(_keypad_changed)
|
||||
|
||||
sync_success = False
|
||||
try:
|
||||
if not await async_wait_for_elk_to_sync(elk, LOGIN_TIMEOUT, SYNC_TIMEOUT):
|
||||
return False
|
||||
await ElkSyncWaiter(elk, LOGIN_TIMEOUT, SYNC_TIMEOUT).async_wait()
|
||||
sync_success = True
|
||||
except LoginFailed:
|
||||
_LOGGER.error("ElkM1 login failed for %s", conf[CONF_HOST])
|
||||
return False
|
||||
except TimeoutError as exc:
|
||||
raise ConfigEntryNotReady(f"Timed out connecting to {conf[CONF_HOST]}") from exc
|
||||
finally:
|
||||
if not sync_success:
|
||||
elk.disconnect()
|
||||
|
||||
elk_temp_unit = elk.panel.temperature_units
|
||||
if elk_temp_unit == "C":
|
||||
@@ -321,48 +328,75 @@ async def async_unload_entry(hass: HomeAssistant, entry: ElkM1ConfigEntry) -> bo
    return unload_ok


async def async_wait_for_elk_to_sync(
    elk: Elk,
    login_timeout: int,
    sync_timeout: int,
) -> bool:
    """Wait until the elk has finished sync. Can fail login or timeout."""
class LoginFailed(Exception):
    """Raised when login to ElkM1 fails."""

    sync_event = asyncio.Event()
    login_event = asyncio.Event()

    success = True
class ElkSyncWaiter:
    """Wait for ElkM1 to sync."""

    def login_status(succeeded: bool) -> None:
        nonlocal success
    def __init__(self, elk: Elk, login_timeout: int, sync_timeout: int) -> None:
        """Initialize the sync waiter."""
        self._elk = elk
        self._login_timeout = login_timeout
        self._sync_timeout = sync_timeout
        self._loop = asyncio.get_running_loop()
        self._sync_future: asyncio.Future[None] = self._loop.create_future()
        self._login_future: asyncio.Future[None] = self._loop.create_future()

        success = succeeded
    @callback
    def _async_set_future_if_not_done(self, future: asyncio.Future[None]) -> None:
        """Set the future result if not already done."""
        if not future.done():
            future.set_result(None)

    @callback
    def _async_login_status(self, succeeded: bool) -> None:
        """Handle login status callback."""
        if succeeded:
            _LOGGER.debug("ElkM1 login succeeded")
            login_event.set()
            self._async_set_future_if_not_done(self._login_future)
        else:
            elk.disconnect()
            _LOGGER.error("ElkM1 login failed; invalid username or password")
            login_event.set()
            sync_event.set()
            self._async_set_exception_if_not_done(self._login_future, LoginFailed)

    def sync_complete() -> None:
        sync_event.set()
    @callback
    def _async_set_exception_if_not_done(
        self, future: asyncio.Future[None], exception: type[Exception]
    ) -> None:
        """Set an exception on the future if not already done."""
        if not future.done():
            future.set_exception(exception())

    @callback
    def _async_sync_complete(self) -> None:
        """Handle sync complete callback."""
        self._async_set_future_if_not_done(self._sync_future)

    async def async_wait(self) -> None:
        """Wait for login and sync to complete.

        Raises LoginFailed if login fails.
        Raises TimeoutError if login or sync times out.
        """
        self._elk.add_handler("login", self._async_login_status)
        self._elk.add_handler("sync_complete", self._async_sync_complete)

    elk.add_handler("login", login_status)
    elk.add_handler("sync_complete", sync_complete)
    for name, event, timeout in (
        ("login", login_event, login_timeout),
        ("sync_complete", sync_event, sync_timeout),
    ):
        _LOGGER.debug("Waiting for %s event for %s seconds", name, timeout)
        try:
            async with asyncio.timeout(timeout):
                await event.wait()
        except TimeoutError:
            _LOGGER.debug("Timed out waiting for %s event", name)
            elk.disconnect()
            raise
        _LOGGER.debug("Received %s event", name)
        for name, future, timeout in (
            ("login", self._login_future, self._login_timeout),
            ("sync_complete", self._sync_future, self._sync_timeout),
        ):
            _LOGGER.debug("Waiting for %s event for %s seconds", name, timeout)
            handle = self._loop.call_later(
                timeout, self._async_set_exception_if_not_done, future, TimeoutError
            )
            try:
                await future
            finally:
                handle.cancel()

    return success
            _LOGGER.debug("Received %s event", name)
        finally:
            self._elk.remove_handler("login", self._async_login_status)
            self._elk.remove_handler("sync_complete", self._async_sync_complete)
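Aside: the refactor above replaces module-level events with per-instance futures that library callbacks resolve and a scheduled timer fails. A minimal, self-contained sketch of that future-plus-call_later pattern (illustrative names only, not the integration's code):

import asyncio


async def wait_with_deadline(timeout: float) -> None:
    """Wait for an external callback to resolve a future, or time out."""
    loop = asyncio.get_running_loop()
    future: asyncio.Future[None] = loop.create_future()

    def on_event() -> None:
        # Called by the external library when the awaited event arrives.
        if not future.done():
            future.set_result(None)

    def on_deadline() -> None:
        # Fires if the event never arrived within the allotted time.
        if not future.done():
            future.set_exception(TimeoutError())

    handle = loop.call_later(timeout, on_deadline)
    # Simulate the library delivering the event shortly after we start waiting.
    loop.call_later(timeout / 2, on_event)
    try:
        await future
    finally:
        handle.cancel()


asyncio.run(wait_with_deadline(1.0))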
@@ -25,7 +25,7 @@ from homeassistant.helpers.typing import DiscoveryInfoType, VolDictType
from homeassistant.util import slugify
from homeassistant.util.network import is_ip_address

from . import async_wait_for_elk_to_sync, hostname_from_url
from . import ElkSyncWaiter, LoginFailed, hostname_from_url
from .const import CONF_AUTO_CONFIGURE, DISCOVER_SCAN_TIMEOUT, DOMAIN, LOGIN_TIMEOUT
from .discovery import (
    _short_mac,
@@ -89,8 +89,9 @@ async def validate_input(data: dict[str, str], mac: str | None) -> dict[str, str
    elk.connect()

    try:
        if not await async_wait_for_elk_to_sync(elk, LOGIN_TIMEOUT, VALIDATE_TIMEOUT):
            raise InvalidAuth
        await ElkSyncWaiter(elk, LOGIN_TIMEOUT, VALIDATE_TIMEOUT).async_wait()
    except LoginFailed as exc:
        raise InvalidAuth from exc
    finally:
        elk.disconnect()

@@ -15,5 +15,5 @@
  "documentation": "https://www.home-assistant.io/integrations/elkm1",
  "iot_class": "local_push",
  "loggers": ["elkm1_lib"],
  "requirements": ["elkm1-lib==2.2.12"]
  "requirements": ["elkm1-lib==2.2.13"]
}

@@ -5,7 +5,7 @@ from __future__ import annotations
import asyncio
from collections import Counter
from collections.abc import Awaitable, Callable
from typing import Literal, NotRequired, TypedDict
from typing import Any, Literal, NotRequired, TypedDict

import voluptuous as vol

@@ -15,6 +15,7 @@ from homeassistant.helpers import config_validation as cv, singleton, storage
from .const import DOMAIN

STORAGE_VERSION = 1
STORAGE_MINOR_VERSION = 2
STORAGE_KEY = DOMAIN


@@ -164,6 +165,7 @@ class EnergyPreferences(TypedDict):

    energy_sources: list[SourceType]
    device_consumption: list[DeviceConsumption]
    device_consumption_water: NotRequired[list[DeviceConsumption]]


class EnergyPreferencesUpdate(EnergyPreferences, total=False):
@@ -328,14 +330,31 @@ DEVICE_CONSUMPTION_SCHEMA = vol.Schema(
)


class _EnergyPreferencesStore(storage.Store[EnergyPreferences]):
    """Energy preferences store with migration support."""

    async def _async_migrate_func(
        self,
        old_major_version: int,
        old_minor_version: int,
        old_data: dict[str, Any],
    ) -> dict[str, Any]:
        """Migrate to the new version."""
        data = old_data
        if old_major_version == 1 and old_minor_version < 2:
            # Add device_consumption_water field if it doesn't exist
            data.setdefault("device_consumption_water", [])
        return data


class EnergyManager:
    """Manage the instance energy prefs."""

    def __init__(self, hass: HomeAssistant) -> None:
        """Initialize energy manager."""
        self._hass = hass
        self._store = storage.Store[EnergyPreferences](
            hass, STORAGE_VERSION, STORAGE_KEY
        self._store = _EnergyPreferencesStore(
            hass, STORAGE_VERSION, STORAGE_KEY, minor_version=STORAGE_MINOR_VERSION
        )
        self.data: EnergyPreferences | None = None
        self._update_listeners: list[Callable[[], Awaitable]] = []
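Aside: a rough illustration of what the new _async_migrate_func does to data stored before minor version 2, reduced to a plain function over a dict. The helper name below is a stand-in; Home Assistant's Store handles the version bookkeeping itself.

from typing import Any


def migrate_energy_prefs(
    old_major_version: int, old_minor_version: int, old_data: dict[str, Any]
) -> dict[str, Any]:
    """Mirror the migration logic for illustration only."""
    data = old_data
    if old_major_version == 1 and old_minor_version < 2:
        # Stored data written before minor version 2 has no water devices list.
        data.setdefault("device_consumption_water", [])
    return data


# Data saved under version 1.1 gains the new key; 1.2 data is left untouched.
print(migrate_energy_prefs(1, 1, {"energy_sources": [], "device_consumption": []}))
print(migrate_energy_prefs(1, 2, {"device_consumption_water": []}))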
@@ -350,6 +369,7 @@ class EnergyManager:
        return {
            "energy_sources": [],
            "device_consumption": [],
            "device_consumption_water": [],
        }

    async def async_update(self, update: EnergyPreferencesUpdate) -> None:
@@ -362,6 +382,7 @@ class EnergyManager:
        for key in (
            "energy_sources",
            "device_consumption",
            "device_consumption_water",
        ):
            if key in update:
                data[key] = update[key]

@@ -153,6 +153,9 @@ class EnergyPreferencesValidation:

    energy_sources: list[ValidationIssues] = dataclasses.field(default_factory=list)
    device_consumption: list[ValidationIssues] = dataclasses.field(default_factory=list)
    device_consumption_water: list[ValidationIssues] = dataclasses.field(
        default_factory=list
    )

    def as_dict(self) -> dict:
        """Return dictionary version."""
@@ -165,6 +168,10 @@ class EnergyPreferencesValidation:
                [dataclasses.asdict(issue) for issue in issues.issues.values()]
                for issues in self.device_consumption
            ],
            "device_consumption_water": [
                [dataclasses.asdict(issue) for issue in issues.issues.values()]
                for issues in self.device_consumption_water
            ],
        }


@@ -742,6 +749,23 @@ async def async_validate(hass: HomeAssistant) -> EnergyPreferencesValidation:
                )
            )

    for device in manager.data.get("device_consumption_water", []):
        device_result = ValidationIssues()
        result.device_consumption_water.append(device_result)
        wanted_statistics_metadata.add(device["stat_consumption"])
        validate_calls.append(
            functools.partial(
                _async_validate_usage_stat,
                hass,
                statistics_metadata,
                device["stat_consumption"],
                WATER_USAGE_DEVICE_CLASSES,
                WATER_USAGE_UNITS,
                WATER_UNIT_ERROR,
                device_result,
            )
        )

    # Fetch the needed statistics metadata
    statistics_metadata.update(
        await recorder.get_instance(hass).async_add_executor_job(

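Aside: the water-device loop above defers each check with functools.partial so the validators run only after the statistics metadata has been fetched. A tiny, self-contained sketch of that deferral pattern (dummy coroutine, not the real _async_validate_usage_stat):

import asyncio
import functools


async def validate_stat(metadata: dict[str, str], stat_id: str, issues: list[str]) -> None:
    """Stand-in validator: record an issue if the statistic is unknown."""
    if stat_id not in metadata:
        issues.append(f"{stat_id} has no statistics")


async def main() -> None:
    issues: list[str] = []
    metadata: dict[str, str] = {}
    # Queue the call now; the metadata dict is filled in before the calls run.
    validate_calls = [functools.partial(validate_stat, metadata, "sensor.water_usage", issues)]
    metadata["sensor.water_usage"] = "m³"
    for call in validate_calls:
        await call()
    print(issues)  # [] because metadata arrived before validation ran


asyncio.run(main())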
@@ -129,6 +129,7 @@ def ws_get_prefs(
        vol.Required("type"): "energy/save_prefs",
        vol.Optional("energy_sources"): ENERGY_SOURCE_SCHEMA,
        vol.Optional("device_consumption"): [DEVICE_CONSUMPTION_SCHEMA],
        vol.Optional("device_consumption_water"): [DEVICE_CONSUMPTION_SCHEMA],
    }
)
@websocket_api.async_response

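Aside: a minimal voluptuous sketch of how the new optional device_consumption_water key is accepted, with DEVICE_CONSUMPTION_SCHEMA trimmed to a single stand-in field (assumes voluptuous is installed; the real schema has more keys):

import voluptuous as vol

# Trimmed stand-in for DEVICE_CONSUMPTION_SCHEMA: only the stat_consumption key.
DEVICE_SCHEMA = vol.Schema({vol.Required("stat_consumption"): str})

SAVE_PREFS_SCHEMA = vol.Schema(
    {
        vol.Required("type"): "energy/save_prefs",
        vol.Optional("device_consumption_water"): [DEVICE_SCHEMA],
    }
)

msg = {
    "type": "energy/save_prefs",
    "device_consumption_water": [{"stat_consumption": "sensor.water_meter"}],
}
print(SAVE_PREFS_SCHEMA(msg))  # passes validation; the key may also be omitted entirely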
@@ -17,7 +17,7 @@
  "mqtt": ["esphome/discover/#"],
  "quality_scale": "platinum",
  "requirements": [
    "aioesphomeapi==42.7.0",
    "aioesphomeapi==42.8.0",
    "esphome-dashboard-api==1.3.0",
    "bleak-esphome==3.4.0"
  ],

homeassistant/components/essent/__init__.py (new file, 29 lines)
@@ -0,0 +1,29 @@
"""The Essent integration."""

from __future__ import annotations

from homeassistant.const import Platform
from homeassistant.core import HomeAssistant

from .coordinator import EssentConfigEntry, EssentDataUpdateCoordinator

PLATFORMS: list[Platform] = [Platform.SENSOR]


async def async_setup_entry(hass: HomeAssistant, entry: EssentConfigEntry) -> bool:
    """Set up Essent from a config entry."""
    coordinator = EssentDataUpdateCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()

    # Start listener updates on the hour to advance cached tariffs
    coordinator.start_listener_schedule()

    entry.runtime_data = coordinator

    await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
    return True


async def async_unload_entry(hass: HomeAssistant, entry: EssentConfigEntry) -> bool:
    """Unload a config entry."""
    return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
homeassistant/components/essent/config_flow.py (new file, 47 lines)
@@ -0,0 +1,47 @@
"""Config flow for Essent integration."""

from __future__ import annotations

import logging
from typing import Any

from essent_dynamic_pricing import (
    EssentClient,
    EssentConnectionError,
    EssentDataError,
    EssentResponseError,
)

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.helpers.aiohttp_client import async_get_clientsession

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)


class EssentConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Essent."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        client = EssentClient(async_get_clientsession(self.hass))

        try:
            await client.async_get_prices()
        except (EssentConnectionError, EssentResponseError):
            return self.async_abort(reason="cannot_connect")
        except EssentDataError:
            return self.async_abort(reason="invalid_data")
        except Exception:
            _LOGGER.exception("Unexpected error while validating the connection")
            return self.async_abort(reason="unknown")

        if user_input is None:
            return self.async_show_form(step_id="user")

        return self.async_create_entry(title="Essent", data={})
homeassistant/components/essent/const.py (new file, 29 lines)
@@ -0,0 +1,29 @@
"""Constants for the Essent integration."""

from __future__ import annotations

from datetime import timedelta
from enum import StrEnum
from typing import Final

DOMAIN: Final = "essent"
UPDATE_INTERVAL: Final = timedelta(hours=12)
ATTRIBUTION: Final = "Data provided by Essent"


class EnergyType(StrEnum):
    """Supported energy types for Essent pricing."""

    ELECTRICITY = "electricity"
    GAS = "gas"


class PriceGroup(StrEnum):
    """Price group types as provided in tariff groups.

    VAT is not emitted as a price group; use tariff.total_amount_vat for VAT.
    """

    MARKET_PRICE = "MARKET_PRICE"
    PURCHASING_FEE = "PURCHASING_FEE"
    TAX = "TAX"
homeassistant/components/essent/coordinator.py (new file, 108 lines)
@@ -0,0 +1,108 @@
"""DataUpdateCoordinator for Essent integration."""

from __future__ import annotations

from collections.abc import Callable
from datetime import datetime, timedelta
import logging

from essent_dynamic_pricing import (
    EssentClient,
    EssentConnectionError,
    EssentDataError,
    EssentError,
    EssentPrices,
    EssentResponseError,
)

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util

from .const import DOMAIN, UPDATE_INTERVAL

_LOGGER = logging.getLogger(__name__)
type EssentConfigEntry = ConfigEntry[EssentDataUpdateCoordinator]


class EssentDataUpdateCoordinator(DataUpdateCoordinator[EssentPrices]):
    """Class to manage fetching Essent data."""

    config_entry: EssentConfigEntry

    def __init__(self, hass: HomeAssistant, config_entry: EssentConfigEntry) -> None:
        """Initialize."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            update_interval=UPDATE_INTERVAL,
        )
        self._client = EssentClient(async_get_clientsession(hass))
        self._unsub_listener: Callable[[], None] | None = None

    def start_listener_schedule(self) -> None:
        """Start listener tick schedule after first successful data fetch."""
        if self.config_entry.pref_disable_polling:
            _LOGGER.debug("Polling disabled by config entry, not starting listener")
            return
        if self._unsub_listener:
            return
        _LOGGER.info("Starting listener updates on the hour")
        self._schedule_listener_tick()

    async def async_shutdown(self) -> None:
        """Cancel any scheduled call, and ignore new runs."""
        await super().async_shutdown()
        if self._unsub_listener:
            self._unsub_listener()
            self._unsub_listener = None

    def _schedule_listener_tick(self) -> None:
        """Schedule listener updates on the hour to advance cached tariffs."""
        if self._unsub_listener:
            self._unsub_listener()

        now = dt_util.utcnow()
        next_hour = now + timedelta(hours=1)
        next_run = datetime(
            next_hour.year,
            next_hour.month,
            next_hour.day,
            next_hour.hour,
            tzinfo=dt_util.UTC,
        )

        _LOGGER.debug("Scheduling next listener tick for %s", next_run)

        @callback
        def _handle(_: datetime) -> None:
            """Handle the scheduled listener tick to update sensors."""
            self._unsub_listener = None
            _LOGGER.debug("Listener tick fired, updating sensors with cached data")
            self.async_update_listeners()
            self._schedule_listener_tick()

        self._unsub_listener = async_track_point_in_utc_time(
            self.hass,
            _handle,
            next_run,
        )

    async def _async_update_data(self) -> EssentPrices:
        """Fetch data from API."""
        try:
            return await self._client.async_get_prices()
        except EssentConnectionError as err:
            raise UpdateFailed(f"Error communicating with API: {err}") from err
        except EssentResponseError as err:
            raise UpdateFailed(str(err)) from err
        except EssentDataError as err:
            _LOGGER.debug("Invalid data received: %s", err)
            raise UpdateFailed(str(err)) from err
        except EssentError as err:
            raise UpdateFailed("Unexpected Essent error") from err
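Aside: the coordinator fires listener updates on the next whole hour. A standalone sketch of that next-run computation using only the standard library (datetime/timezone stand in for dt_util; the replace() call is equivalent to rebuilding the datetime from its year/month/day/hour fields):

from datetime import datetime, timedelta, timezone


def next_whole_hour(now: datetime) -> datetime:
    """Return the next top-of-hour moment after `now` (UTC)."""
    next_hour = now + timedelta(hours=1)
    return next_hour.replace(minute=0, second=0, microsecond=0)


now = datetime(2025, 11, 7, 9, 41, 30, tzinfo=timezone.utc)
print(next_whole_hour(now))  # 2025-11-07 10:00:00+00:00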
homeassistant/components/essent/entity.py (new file, 36 lines)
@@ -0,0 +1,36 @@
"""Base entity for Essent integration."""

from essent_dynamic_pricing.models import EnergyData

from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import ATTRIBUTION, DOMAIN, EnergyType
from .coordinator import EssentDataUpdateCoordinator


class EssentEntity(CoordinatorEntity[EssentDataUpdateCoordinator]):
    """Base class for Essent entities."""

    _attr_has_entity_name = True
    _attr_attribution = ATTRIBUTION

    def __init__(
        self,
        coordinator: EssentDataUpdateCoordinator,
        energy_type: EnergyType,
    ) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        self.energy_type = energy_type
        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
            name="Essent",
            manufacturer="Essent",
        )

    @property
    def energy_data(self) -> EnergyData:
        """Return the energy data for this entity."""
        return getattr(self.coordinator.data, self.energy_type.value)
homeassistant/components/essent/manifest.json (new file, 12 lines)
@@ -0,0 +1,12 @@
{
  "domain": "essent",
  "name": "Essent",
  "codeowners": ["@jaapp"],
  "config_flow": true,
  "documentation": "https://www.home-assistant.io/integrations/essent",
  "integration_type": "service",
  "iot_class": "cloud_polling",
  "quality_scale": "silver",
  "requirements": ["essent-dynamic-pricing==0.2.7"],
  "single_config_entry": true
}
homeassistant/components/essent/quality_scale.yaml (new file, 89 lines)
@@ -0,0 +1,89 @@
rules:
  # Bronze
  action-setup:
    status: exempt
    comment: |
      This integration does not provide additional actions or services.
  appropriate-polling: done
  brands: done
  common-modules: done
  config-flow-test-coverage: done
  config-flow: done
  dependency-transparency: done
  docs-actions:
    status: exempt
    comment: |
      This integration does not expose services or actions.
  docs-high-level-description: done
  docs-installation-instructions: done
  docs-removal-instructions: done
  entity-event-setup:
    status: exempt
    comment: |
      Entities rely on DataUpdateCoordinator updates rather than event subscriptions.
  entity-unique-id: done
  has-entity-name: done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
  unique-config-entry: done

  # Silver
  action-exceptions:
    status: exempt
    comment: |
      This integration does not expose user actions.
  config-entry-unloading: done
  docs-configuration-parameters:
    status: exempt
    comment: |
      No options flow is provided.
  docs-installation-parameters: done
  entity-unavailable: done
  integration-owner: done
  log-when-unavailable: done
  parallel-updates: done
  reauthentication-flow:
    status: exempt
    comment: |
      No authentication is required for this integration.
  test-coverage: done
  # Gold
  devices: done
  diagnostics: todo
  discovery-update-info: todo
  discovery: todo
  docs-data-update: todo
  docs-examples: todo
  docs-known-limitations: todo
  docs-supported-devices: todo
  docs-supported-functions: todo
  docs-troubleshooting: todo
  docs-use-cases: todo
  dynamic-devices:
    status: exempt
    comment: |
      Device-less integration.
  entity-category: done
  entity-device-class: done
  entity-disabled-by-default: done
  entity-translations: done
  exception-translations: todo
  icon-translations:
    status: exempt
    comment: |
      No custom icons are defined.
  reconfiguration-flow: todo
  repair-issues:
    status: exempt
    comment: |
      No known repair flows at this time.
  stale-devices:
    status: exempt
    comment: |
      Device-less integration.

  # Platinum
  async-dependency: done
  inject-websession: done
  strict-typing: todo
homeassistant/components/essent/sensor.py (new file, 215 lines)
@@ -0,0 +1,215 @@
"""Sensor platform for Essent integration."""

from __future__ import annotations

from collections.abc import Callable
from dataclasses import dataclass
import logging
from typing import Any

from essent_dynamic_pricing.models import EnergyData, Tariff

from homeassistant.components.sensor import (
    EntityCategory,
    SensorEntity,
    SensorEntityDescription,
    SensorStateClass,
)
from homeassistant.const import CURRENCY_EURO
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import dt as dt_util

from .const import EnergyType, PriceGroup
from .coordinator import EssentConfigEntry, EssentDataUpdateCoordinator
from .entity import EssentEntity

_LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 1


@dataclass(frozen=True, kw_only=True)
class EssentSensorEntityDescription(SensorEntityDescription):
    """Describe an Essent sensor."""

    value_fn: Callable[[EnergyData], float | None]
    energy_types: tuple[EnergyType, ...] = (EnergyType.ELECTRICITY, EnergyType.GAS)


def _get_all_tariffs(data: EnergyData) -> list[Tariff]:
    """Return tariffs for both today and tomorrow."""
    return [*data.tariffs, *data.tariffs_tomorrow]


def _get_current_tariff(data: EnergyData) -> Tariff | None:
    """Return the currently active tariff."""
    now = dt_util.now()
    for tariff in _get_all_tariffs(data):
        if tariff.start is None or tariff.end is None:
            continue
        if tariff.start <= now < tariff.end:
            return tariff
    _LOGGER.debug("No current tariff found")
    return None


def _get_next_tariff(data: EnergyData) -> Tariff | None:
    """Return the next tariff."""
    now = dt_util.now()
    for tariff in _get_all_tariffs(data):
        if tariff.start is None:
            continue
        if tariff.start > now:
            return tariff
    _LOGGER.debug("No upcoming tariff found")
    return None


def _get_current_tariff_groups(
    data: EnergyData,
) -> tuple[Tariff | None, dict[str, Any]]:
    """Return the current tariff and grouped amounts."""
    if (tariff := _get_current_tariff(data)) is None:
        return None, {}
    groups = {
        group["type"]: group.get("amount") for group in tariff.groups if "type" in group
    }
    return tariff, groups


SENSORS: tuple[EssentSensorEntityDescription, ...] = (
    EssentSensorEntityDescription(
        key="current_price",
        translation_key="current_price",
        value_fn=lambda energy_data: (
            None
            if (tariff := _get_current_tariff(energy_data)) is None
            else tariff.total_amount
        ),
    ),
    EssentSensorEntityDescription(
        key="next_price",
        translation_key="next_price",
        value_fn=lambda energy_data: (
            None
            if (tariff := _get_next_tariff(energy_data)) is None
            else tariff.total_amount
        ),
        entity_registry_enabled_default=False,
    ),
    EssentSensorEntityDescription(
        key="average_today",
        translation_key="average_today",
        value_fn=lambda energy_data: energy_data.avg_price,
    ),
    EssentSensorEntityDescription(
        key="lowest_price_today",
        translation_key="lowest_price_today",
        value_fn=lambda energy_data: energy_data.min_price,
        energy_types=(EnergyType.ELECTRICITY,),
        entity_registry_enabled_default=False,
    ),
    EssentSensorEntityDescription(
        key="highest_price_today",
        translation_key="highest_price_today",
        value_fn=lambda energy_data: energy_data.max_price,
        energy_types=(EnergyType.ELECTRICITY,),
        entity_registry_enabled_default=False,
    ),
    EssentSensorEntityDescription(
        key="current_price_ex_vat",
        translation_key="current_price_ex_vat",
        value_fn=lambda energy_data: (
            None
            if (tariff := _get_current_tariff(energy_data)) is None
            else tariff.total_amount_ex
        ),
        entity_registry_enabled_default=False,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    EssentSensorEntityDescription(
        key="current_price_vat",
        translation_key="current_price_vat",
        value_fn=lambda energy_data: (
            None
            if (tariff := _get_current_tariff(energy_data)) is None
            # VAT is exposed as tariff.total_amount_vat, not as a tariff group
            else tariff.total_amount_vat
        ),
        entity_registry_enabled_default=False,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    EssentSensorEntityDescription(
        key="current_price_market_price",
        translation_key="current_price_market_price",
        value_fn=lambda energy_data: _get_current_tariff_groups(energy_data)[1].get(
            PriceGroup.MARKET_PRICE
        ),
        entity_registry_enabled_default=False,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    EssentSensorEntityDescription(
        key="current_price_purchasing_fee",
        translation_key="current_price_purchasing_fee",
        value_fn=lambda energy_data: _get_current_tariff_groups(energy_data)[1].get(
            PriceGroup.PURCHASING_FEE
        ),
        entity_registry_enabled_default=False,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    EssentSensorEntityDescription(
        key="current_price_tax",
        translation_key="current_price_tax",
        value_fn=lambda energy_data: _get_current_tariff_groups(energy_data)[1].get(
            PriceGroup.TAX
        ),
        entity_registry_enabled_default=False,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
)


async def async_setup_entry(
    hass: HomeAssistant,
    entry: EssentConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Essent sensors."""
    coordinator = entry.runtime_data
    async_add_entities(
        EssentSensor(coordinator, energy_type, description)
        for description in SENSORS
        for energy_type in description.energy_types
    )


class EssentSensor(EssentEntity, SensorEntity):
    """Generic Essent sensor driven by entity descriptions."""

    _attr_state_class = SensorStateClass.MEASUREMENT
    _attr_suggested_display_precision = 3

    entity_description: EssentSensorEntityDescription

    def __init__(
        self,
        coordinator: EssentDataUpdateCoordinator,
        energy_type: EnergyType,
        description: EssentSensorEntityDescription,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator, energy_type)
        self.entity_description = description
        self._attr_unique_id = f"{energy_type}-{description.key}"
        self._attr_translation_key = f"{energy_type}_{description.translation_key}"

    @property
    def native_value(self) -> float | None:
        """Return the current value."""
        return self.entity_description.value_fn(self.energy_data)

    @property
    def native_unit_of_measurement(self) -> str:
        """Return the unit of measurement."""
        return f"{CURRENCY_EURO}/{self.energy_data.unit}"
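Aside: a self-contained sketch of how _get_current_tariff and _get_next_tariff pick a slot out of today's and tomorrow's tariffs, with a stand-in dataclass instead of the essent_dynamic_pricing Tariff model:

from dataclasses import dataclass
from datetime import datetime, timedelta, timezone


@dataclass
class FakeTariff:
    start: datetime
    end: datetime
    total_amount: float


def current_tariff(tariffs: list[FakeTariff], now: datetime) -> FakeTariff | None:
    """Return the tariff whose [start, end) window contains `now`."""
    for tariff in tariffs:
        if tariff.start <= now < tariff.end:
            return tariff
    return None


def next_tariff(tariffs: list[FakeTariff], now: datetime) -> FakeTariff | None:
    """Return the first tariff that starts after `now`."""
    for tariff in tariffs:
        if tariff.start > now:
            return tariff
    return None


base = datetime(2025, 11, 7, 0, 0, tzinfo=timezone.utc)
tariffs = [
    FakeTariff(base + timedelta(hours=h), base + timedelta(hours=h + 1), 0.20 + h * 0.01)
    for h in range(48)  # today plus tomorrow, hourly slots
]
now = base + timedelta(hours=9, minutes=30)
print(current_tariff(tariffs, now).total_amount)  # 0.29
print(next_tariff(tariffs, now).total_amount)     # 0.30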
homeassistant/components/essent/strings.json (new file, 73 lines)
@@ -0,0 +1,73 @@
{
  "config": {
    "abort": {
      "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
      "invalid_data": "Received invalid data from Essent",
      "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]",
      "unknown": "[%key:common::config_flow::error::unknown%]"
    },
    "step": {
      "user": {
        "description": "Set up Essent dynamic energy price monitoring for the Netherlands. For customers with dynamic pricing contracts only."
      }
    }
  },
  "entity": {
    "sensor": {
      "electricity_average_today": {
        "name": "Average electricity price today"
      },
      "electricity_current_price": {
        "name": "Current electricity price"
      },
      "electricity_current_price_ex_vat": {
        "name": "Current electricity price excl. VAT"
      },
      "electricity_current_price_market_price": {
        "name": "Current electricity market price"
      },
      "electricity_current_price_purchasing_fee": {
        "name": "Current electricity purchasing fee"
      },
      "electricity_current_price_tax": {
        "name": "Current electricity tax"
      },
      "electricity_current_price_vat": {
        "name": "Current electricity VAT"
      },
      "electricity_highest_price_today": {
        "name": "Highest electricity price today"
      },
      "electricity_lowest_price_today": {
        "name": "Lowest electricity price today"
      },
      "electricity_next_price": {
        "name": "Next electricity price"
      },
      "gas_average_today": {
        "name": "Average gas price today"
      },
      "gas_current_price": {
        "name": "Current gas price"
      },
      "gas_current_price_ex_vat": {
        "name": "Current gas price excl. VAT"
      },
      "gas_current_price_market_price": {
        "name": "Current gas market price"
      },
      "gas_current_price_purchasing_fee": {
        "name": "Current gas purchasing fee"
      },
      "gas_current_price_tax": {
        "name": "Current gas tax"
      },
      "gas_current_price_vat": {
        "name": "Current gas VAT"
      },
      "gas_next_price": {
        "name": "Next gas price"
      }
    }
  }
}
@@ -47,5 +47,13 @@
    "turn_on": {
      "service": "mdi:fan"
    }
  },
  "triggers": {
    "turned_off": {
      "trigger": "mdi:fan-off"
    },
    "turned_on": {
      "trigger": "mdi:fan"
    }
  }
}

@@ -1,4 +1,8 @@
{
  "common": {
    "trigger_behavior_description": "The behavior of the targeted fans to trigger on.",
    "trigger_behavior_name": "Behavior"
  },
  "device_automation": {
    "action_type": {
      "toggle": "[%key:common::device_automation::action_type::toggle%]",
@@ -66,6 +70,13 @@
        "forward": "Forward",
        "reverse": "Reverse"
      }
    },
    "trigger_behavior": {
      "options": {
        "any": "Any",
        "first": "First",
        "last": "Last"
      }
    }
  },
  "services": {
@@ -152,5 +163,29 @@
      "name": "[%key:common::action::turn_on%]"
    }
  },
  "title": "Fan"
  "title": "Fan",
  "triggers": {
    "turned_off": {
      "description": "Triggers when a fan is turned off.",
      "description_configured": "[%key:component::fan::triggers::turned_off::description%]",
      "fields": {
        "behavior": {
          "description": "[%key:component::fan::common::trigger_behavior_description%]",
          "name": "[%key:component::fan::common::trigger_behavior_name%]"
        }
      },
      "name": "When a fan is turned off"
    },
    "turned_on": {
      "description": "Triggers when a fan is turned on.",
      "description_configured": "[%key:component::fan::triggers::turned_on::description%]",
      "fields": {
        "behavior": {
          "description": "[%key:component::fan::common::trigger_behavior_description%]",
          "name": "[%key:component::fan::common::trigger_behavior_name%]"
        }
      },
      "name": "When a fan is turned on"
    }
  }
}

homeassistant/components/fan/trigger.py (new file, 17 lines)
@@ -0,0 +1,17 @@
"""Provides triggers for fans."""

from homeassistant.const import STATE_OFF, STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.trigger import Trigger, make_entity_state_trigger

from . import DOMAIN

TRIGGERS: dict[str, type[Trigger]] = {
    "turned_off": make_entity_state_trigger(DOMAIN, STATE_OFF),
    "turned_on": make_entity_state_trigger(DOMAIN, STATE_ON),
}


async def async_get_triggers(hass: HomeAssistant) -> dict[str, type[Trigger]]:
    """Return the triggers for fans."""
    return TRIGGERS
homeassistant/components/fan/triggers.yaml (new file, 18 lines)
@@ -0,0 +1,18 @@
.trigger_common: &trigger_common
  target:
    entity:
      domain: fan
  fields:
    behavior:
      required: true
      default: any
      selector:
        select:
          options:
            - first
            - last
            - any
          translation_key: trigger_behavior

turned_on: *trigger_common
turned_off: *trigger_common
@@ -778,7 +778,7 @@ class ManifestJSONView(HomeAssistantView):
    {
        "type": "frontend/get_icons",
        vol.Required("category"): vol.In(
            {"entity", "entity_component", "services", "triggers", "conditions"}
            {"conditions", "entity", "entity_component", "services", "triggers"}
        ),
        vol.Optional("integration"): vol.All(cv.ensure_list, [str]),
    }

@@ -19,6 +19,9 @@
  ],
  "documentation": "https://www.home-assistant.io/integrations/frontend",
  "integration_type": "system",
  "preview_features": {
    "winter_mode": {}
  },
  "quality_scale": "internal",
  "requirements": ["home-assistant-frontend==20251105.1"]
}

@@ -1,4 +1,12 @@
{
  "preview_features": {
    "winter_mode": {
      "description": "Adds falling snowflakes on your screen. Get your home ready for winter! ❄️",
      "disable_confirmation": "Snowflakes will no longer fall on your screen. You can re-enable this at any time in labs settings.",
      "enable_confirmation": "Snowflakes will start falling on your screen. You can turn this off at any time in labs settings.",
      "name": "Winter mode"
    }
  },
  "selector": {
    "mode": {
      "options": {