Mirror of https://github.com/home-assistant/core.git
Synced 2025-09-23 20:09:35 +00:00

Compare commits: 1 commit, manual_tri… → sort-commo…
Commit: 5ae036a7e2
.github/workflows/builder.yml (18 changes)
@@ -94,7 +94,7 @@ jobs:
       - name: Download nightly wheels of frontend
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v9
+        uses: dawidd6/action-download-artifact@v8
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/frontend
@@ -105,7 +105,7 @@ jobs:
       - name: Download nightly wheels of intents
         if: needs.init.outputs.channel == 'dev'
-        uses: dawidd6/action-download-artifact@v9
+        uses: dawidd6/action-download-artifact@v8
         with:
           github_token: ${{secrets.GITHUB_TOKEN}}
           repo: home-assistant/intents-package
@@ -175,7 +175,7 @@ jobs:
           sed -i "s|pykrakenapi|# pykrakenapi|g" requirements_all.txt

       - name: Download translations
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           name: translations
@@ -197,7 +197,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2025.02.0
+        uses: home-assistant/builder@2024.08.2
        with:
           args: |
             $BUILD_ARGS \
@@ -263,7 +263,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build base image
-        uses: home-assistant/builder@2025.02.0
+        uses: home-assistant/builder@2024.08.2
        with:
           args: |
             $BUILD_ARGS \
@@ -462,7 +462,7 @@ jobs:
           python-version: ${{ env.DEFAULT_PYTHON }}

       - name: Download translations
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           name: translations
@@ -509,7 +509,7 @@ jobs:
           password: ${{ secrets.GITHUB_TOKEN }}

       - name: Build Docker image
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+        uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -522,7 +522,7 @@ jobs:
       - name: Push Docker image
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
         id: push
-        uses: docker/build-push-action@471d1dc4e07e5cdedd4c2171150001c434f0b7a4 # v6.15.0
+        uses: docker/build-push-action@0adf9959216b96bec444f325f1e493d4aa344497 # v6.14.0
         with:
           context: . # So action will not pull the repository again
           file: ./script/hassfest/docker/Dockerfile
@@ -531,7 +531,7 @@ jobs:

       - name: Generate artifact attestation
         if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
-        uses: actions/attest-build-provenance@bd77c077858b8d561b7a36cbe48ef4cc642ca39d # v2.2.2
+        uses: actions/attest-build-provenance@520d128f165991a6c774bcb264f323e3d70747f4 # v2.2.0
         with:
           subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
           subject-digest: ${{ steps.push.outputs.digest }}
.github/workflows/ci.yaml (56 changes)
@@ -40,7 +40,7 @@ env:
   CACHE_VERSION: 11
   UV_CACHE_VERSION: 1
   MYPY_CACHE_VERSION: 9
-  HA_SHORT_VERSION: "2025.4"
+  HA_SHORT_VERSION: "2025.3"
   DEFAULT_PYTHON: "3.13"
   ALL_PYTHON_VERSIONS: "['3.13']"
   # 10.3 is the oldest supported version
@@ -240,7 +240,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v4.2.1
         with:
           path: venv
           key: >-
@@ -256,7 +256,7 @@ jobs:
          uv pip install "$(cat requirements_test.txt | grep pre-commit)"
      - name: Restore pre-commit environment from cache
        id: cache-precommit
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v4.2.1
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          lookup-only: true
@@ -286,7 +286,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -295,7 +295,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -326,7 +326,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -335,7 +335,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -366,7 +366,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -375,7 +375,7 @@ jobs:
             needs.info.outputs.pre-commit_cache_key }}
       - name: Restore pre-commit environment from cache
         id: cache-precommit
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: ${{ env.PRE_COMMIT_CACHE }}
           fail-on-cache-miss: true
@@ -482,7 +482,7 @@ jobs:
            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore base Python virtual environment
        id: cache-venv
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v4.2.1
        with:
          path: venv
          key: >-
@@ -490,7 +490,7 @@ jobs:
            needs.info.outputs.python_cache_key }}
      - name: Restore uv wheel cache
        if: steps.cache-venv.outputs.cache-hit != 'true'
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v4.2.1
        with:
          path: ${{ env.UV_CACHE_DIR }}
          key: >-
@@ -578,7 +578,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -611,7 +611,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -649,7 +649,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -692,7 +692,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -739,7 +739,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -791,7 +791,7 @@ jobs:
            env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
        with:
          path: venv
          fail-on-cache-miss: true
@@ -799,7 +799,7 @@ jobs:
           ${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
             needs.info.outputs.python_cache_key }}
       - name: Restore mypy cache
-        uses: actions/cache@v4.2.2
+        uses: actions/cache@v4.2.1
         with:
           path: .mypy_cache
           key: >-
@@ -865,7 +865,7 @@ jobs:
           check-latest: true
       - name: Restore base Python virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -929,7 +929,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -942,7 +942,7 @@ jobs:
         run: |
           echo "::add-matcher::.github/workflows/matchers/pytest-slow.json"
       - name: Download pytest_buckets
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           name: pytest_buckets
       - name: Compile English translations
@@ -1051,7 +1051,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1181,7 +1181,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1271,12 +1271,12 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'true'
-        uses: codecov/codecov-action@v5.4.0
+        uses: codecov/codecov-action@v5.3.1
         with:
           fail_ci_if_error: true
           flags: full-suite
@@ -1328,7 +1328,7 @@ jobs:
           check-latest: true
       - name: Restore full Python ${{ matrix.python-version }} virtual environment
         id: cache-venv
-        uses: actions/cache/restore@v4.2.2
+        uses: actions/cache/restore@v4.2.1
         with:
           path: venv
           fail-on-cache-miss: true
@@ -1410,12 +1410,12 @@ jobs:
       - name: Check out code from GitHub
         uses: actions/checkout@v4.2.2
       - name: Download all coverage artifacts
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           pattern: coverage-*
       - name: Upload coverage to Codecov
         if: needs.info.outputs.test_full_suite == 'false'
-        uses: codecov/codecov-action@v5.4.0
+        uses: codecov/codecov-action@v5.3.1
         with:
           fail_ci_if_error: true
           token: ${{ secrets.CODECOV_TOKEN }}
.github/workflows/wheels.yml (58 changes)
@@ -138,17 +138,17 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           name: requirements_diff

@@ -159,7 +159,7 @@ jobs:
           sed -i "/uv/d" requirements_diff.txt

       - name: Build wheels
-        uses: home-assistant/wheels@2025.02.0
+        uses: home-assistant/wheels@2024.11.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -187,22 +187,22 @@ jobs:
         uses: actions/checkout@v4.2.2

       - name: Download env_file
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           name: env_file

       - name: Download build_constraints
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           name: build_constraints

       - name: Download requirements_diff
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           name: requirements_diff

       - name: Download requirements_all_wheels
-        uses: actions/download-artifact@v4.1.9
+        uses: actions/download-artifact@v4.1.8
         with:
           name: requirements_all_wheels

@@ -218,8 +218,16 @@ jobs:
           sed -i "/uv/d" requirements.txt
           sed -i "/uv/d" requirements_diff.txt

-      - name: Build wheels
-        uses: home-assistant/wheels@2025.02.0
+      - name: Split requirements all
+        run: |
+          # We split requirements all into multiple files.
+          # This is to prevent the build from running out of memory when
+          # resolving packages on 32-bits systems (like armhf, armv7).
+
+          split -l $(expr $(expr $(cat requirements_all.txt | wc -l) + 1) / 3) requirements_all_wheels_${{ matrix.arch }}.txt requirements_all.txt
+
+      - name: Build wheels (part 1)
+        uses: home-assistant/wheels@2024.11.0
         with:
           abi: ${{ matrix.abi }}
           tag: musllinux_1_2
@@ -230,4 +238,32 @@ jobs:
           skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
           constraints: "homeassistant/package_constraints.txt"
           requirements-diff: "requirements_diff.txt"
-          requirements: "requirements_all.txt"
+          requirements: "requirements_all.txtaa"
+
+      - name: Build wheels (part 2)
+        uses: home-assistant/wheels@2024.11.0
+        with:
+          abi: ${{ matrix.abi }}
+          tag: musllinux_1_2
+          arch: ${{ matrix.arch }}
+          wheels-key: ${{ secrets.WHEELS_KEY }}
+          env-file: true
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
+          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
+          constraints: "homeassistant/package_constraints.txt"
+          requirements-diff: "requirements_diff.txt"
+          requirements: "requirements_all.txtab"
+
+      - name: Build wheels (part 3)
+        uses: home-assistant/wheels@2024.11.0
+        with:
+          abi: ${{ matrix.abi }}
+          tag: musllinux_1_2
+          arch: ${{ matrix.arch }}
+          wheels-key: ${{ secrets.WHEELS_KEY }}
+          env-file: true
+          apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm;zlib-ng-dev"
+          skip-binary: aiohttp;charset-normalizer;grpcio;multidict;SQLAlchemy;propcache;protobuf;pymicro-vad;yarl
+          constraints: "homeassistant/package_constraints.txt"
+          requirements-diff: "requirements_diff.txt"
+          requirements: "requirements_all.txtac"
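Note on the chunk names above: the `requirements:` values `requirements_all.txtaa`, `requirements_all.txtab`, and `requirements_all.txtac` are the files split(1) generates, since it appends two-letter suffixes (aa, ab, ac, ...) to the given output prefix `requirements_all.txt`. A rough Python equivalent of that chunking, a hypothetical helper rather than anything in the workflow:

from pathlib import Path
from string import ascii_lowercase

def split_lines(src: Path, prefix: str, parts: int = 3) -> list[Path]:
    """Split src into roughly `parts` chunks, named like GNU split's default."""
    lines = src.read_text().splitlines(keepends=True)
    per_chunk = max(1, (len(lines) + 1) // parts)  # mirrors: (wc -l + 1) / 3
    out: list[Path] = []
    for i in range(0, len(lines), per_chunk):
        # Two-letter suffixes counting aa, ab, ac, ... (a short final chunk can
        # spill into a fourth file, just as with split -l).
        n = i // per_chunk
        suffix = ascii_lowercase[n // 26] + ascii_lowercase[n % 26]
        chunk = Path(f"{prefix}{suffix}")
        chunk.write_text("".join(lines[i : i + per_chunk]))
        out.append(chunk)
    return out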
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.9.8
+    rev: v0.9.7
     hooks:
       - id: ruff
         args:
@@ -396,7 +396,6 @@ homeassistant.components.pure_energie.*
 homeassistant.components.purpleair.*
 homeassistant.components.pushbullet.*
 homeassistant.components.pvoutput.*
 homeassistant.components.pyload.*
 homeassistant.components.python_script.*
-homeassistant.components.qbus.*
 homeassistant.components.qnap_qsw.*
@@ -529,7 +528,6 @@ homeassistant.components.vallox.*
homeassistant.components.valve.*
homeassistant.components.velbus.*
homeassistant.components.vlc_telnet.*
homeassistant.components.vodafone_station.*
homeassistant.components.wake_on_lan.*
homeassistant.components.wake_word.*
homeassistant.components.wallbox.*
.vscode/launch.json (1 change)
@@ -38,6 +38,7 @@
       "module": "pytest",
       "justMyCode": false,
       "args": [
         "--timeout=10",
+        "--picked"
       ],
     },
CODEOWNERS (2 changes)
@@ -1401,8 +1401,6 @@ build.json @home-assistant/supervisor
 /tests/components/smappee/ @bsmappee
 /homeassistant/components/smart_meter_texas/ @grahamwetzler
 /tests/components/smart_meter_texas/ @grahamwetzler
-/homeassistant/components/smartthings/ @joostlek
-/tests/components/smartthings/ @joostlek
 /homeassistant/components/smarttub/ @mdz
 /tests/components/smarttub/ @mdz
 /homeassistant/components/smarty/ @z0mbieprocess
@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["accuweather"],
-  "requirements": ["accuweather==4.1.0"],
+  "requirements": ["accuweather==4.0.0"],
   "single_config_entry": true
 }
@@ -14,7 +14,7 @@ from homeassistant.components.notify import (
 )
 from homeassistant.const import STATE_IDLE, STATE_OFF, STATE_ON
 from homeassistant.core import Event, EventStateChangedData, HassJob, HomeAssistant
-from homeassistant.exceptions import ServiceNotFound, ServiceValidationError
+from homeassistant.exceptions import ServiceNotFound
 from homeassistant.helpers.entity import Entity
 from homeassistant.helpers.event import (
     async_track_point_in_time,
@@ -195,8 +195,7 @@ class AlertEntity(Entity):

     async def async_turn_off(self, **kwargs: Any) -> None:
         """Async Acknowledge alert."""
-        if not self._can_ack:
-            raise ServiceValidationError("This alert cannot be acknowledged")
         LOGGER.debug("Acknowledged Alert: %s", self._attr_name)
         self._ack = True
         self.async_write_ha_state()
@@ -2,8 +2,6 @@

 from __future__ import annotations

-from functools import partial
-
 import anthropic

 from homeassistant.config_entries import ConfigEntry
@@ -12,7 +10,7 @@ from homeassistant.core import HomeAssistant
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import config_validation as cv

-from .const import CONF_CHAT_MODEL, DOMAIN, LOGGER, RECOMMENDED_CHAT_MODEL
+from .const import DOMAIN, LOGGER

 PLATFORMS = (Platform.CONVERSATION,)
 CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)
@@ -22,13 +20,14 @@ type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient]

 async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
     """Set up Anthropic from a config entry."""
-    client = await hass.async_add_executor_job(
-        partial(anthropic.AsyncAnthropic, api_key=entry.data[CONF_API_KEY])
-    )
+    client = anthropic.AsyncAnthropic(api_key=entry.data[CONF_API_KEY])
     try:
-        model_id = entry.options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL)
-        model = await client.models.retrieve(model_id=model_id, timeout=10.0)
-        LOGGER.debug("Anthropic model: %s", model.display_name)
+        await client.messages.create(
+            model="claude-3-haiku-20240307",
+            max_tokens=1,
+            messages=[{"role": "user", "content": "Hi"}],
+            timeout=10.0,
+        )
     except anthropic.AuthenticationError as err:
         LOGGER.error("Invalid API key: %s", err)
         return False
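For context, the `await hass.async_add_executor_job(partial(anthropic.AsyncAnthropic, ...))` wrapper on the newer side of this hunk runs the client constructor in a worker thread, since constructing an SDK client can do blocking I/O. A minimal sketch of the same pattern with plain asyncio, using a stand-in client class rather than the real SDK:

import asyncio
from functools import partial

class BlockingClient:
    """Stand-in for an SDK client whose constructor does blocking I/O."""

    def __init__(self, api_key: str) -> None:
        # A real SDK might read cert stores or config files here.
        self.api_key = api_key

async def create_client(api_key: str) -> BlockingClient:
    loop = asyncio.get_running_loop()
    # Same shape as hass.async_add_executor_job(partial(...)): run the
    # constructor in a worker thread so the event loop is never blocked.
    return await loop.run_in_executor(None, partial(BlockingClient, api_key=api_key))

async def main() -> None:
    client = await create_client("sk-example")
    print(type(client).__name__, client.api_key)

asyncio.run(main())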
@@ -2,7 +2,6 @@

 from __future__ import annotations

-from functools import partial
 import logging
 from types import MappingProxyType
 from typing import Any
@@ -60,10 +59,13 @@ async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:

     Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
     """
-    client = await hass.async_add_executor_job(
-        partial(anthropic.AsyncAnthropic, api_key=data[CONF_API_KEY])
-    )
-    await client.models.list(timeout=10.0)
+    client = anthropic.AsyncAnthropic(api_key=data[CONF_API_KEY])
+    await client.messages.create(
+        model="claude-3-haiku-20240307",
+        max_tokens=1,
+        messages=[{"role": "user", "content": "Hi"}],
+        timeout=10.0,
+    )


 class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
@@ -233,6 +233,7 @@ class AppleTVManager(DeviceListener):
             pass
         except Exception:
             _LOGGER.exception("Failed to connect")
+            await self.disconnect()

     async def _connect_loop(self) -> None:
         """Connect loop background task function."""
@@ -117,7 +117,7 @@ async def async_pipeline_from_audio_stream(
     """
     with chat_session.async_get_chat_session(hass, conversation_id) as session:
         pipeline_input = PipelineInput(
-            session=session,
+            conversation_id=session.conversation_id,
             device_id=device_id,
             stt_metadata=stt_metadata,
             stt_stream=stt_stream,
@@ -19,7 +19,14 @@ import wave

 import hass_nabucasa
 import voluptuous as vol

-from homeassistant.components import conversation, stt, tts, wake_word, websocket_api
+from homeassistant.components import (
+    conversation,
+    media_source,
+    stt,
+    tts,
+    wake_word,
+    websocket_api,
+)
 from homeassistant.components.tts import (
     generate_media_source_id as tts_generate_media_source_id,
 )
@@ -89,9 +96,6 @@ ENGINE_LANGUAGE_PAIRS = (
 )

 KEY_ASSIST_PIPELINE: HassKey[PipelineData] = HassKey(DOMAIN)
-KEY_PIPELINE_CONVERSATION_DATA: HassKey[dict[str, PipelineConversationData]] = HassKey(
-    "pipeline_conversation_data"
-)


 def validate_language(data: dict[str, Any]) -> Any:
@@ -562,7 +566,8 @@ class PipelineRun:

     id: str = field(default_factory=ulid_util.ulid_now)
     stt_provider: stt.SpeechToTextEntity | stt.Provider = field(init=False, repr=False)
-    tts_stream: tts.ResultStream | None = field(init=False, default=None)
+    tts_engine: str = field(init=False, repr=False)
+    tts_options: dict | None = field(init=False, default=None)
     wake_word_entity_id: str | None = field(init=False, default=None, repr=False)
     wake_word_entity: wake_word.WakeWordDetectionEntity = field(init=False, repr=False)

@@ -585,12 +590,6 @@
     _device_id: str | None = None
     """Optional device id set during run start."""

-    _conversation_data: PipelineConversationData | None = None
-    """Data tied to the conversation ID."""
-
-    _intent_agent_only = False
-    """If request should only be handled by agent, ignoring sentence triggers and local processing."""
-
     def __post_init__(self) -> None:
         """Set language for pipeline."""
         self.language = self.pipeline.language or self.hass.config.language
@@ -640,18 +639,13 @@
         self._device_id = device_id
         self._start_debug_recording_thread()

-        data: dict[str, Any] = {
+        data = {
             "pipeline": self.pipeline.id,
             "language": self.language,
             "conversation_id": conversation_id,
         }
         if self.runner_data is not None:
             data["runner_data"] = self.runner_data
-        if self.tts_stream:
-            data["tts_output"] = {
-                "url": self.tts_stream.url,
-                "mime_type": self.tts_stream.content_type,
-            }

         self.process_event(PipelineEvent(PipelineEventType.RUN_START, data))

@@ -1013,36 +1007,19 @@

         yield chunk.audio

-    async def prepare_recognize_intent(self, session: chat_session.ChatSession) -> None:
+    async def prepare_recognize_intent(self) -> None:
         """Prepare recognizing an intent."""
-        self._conversation_data = async_get_pipeline_conversation_data(
-            self.hass, session
-        )
-
-        if self._conversation_data.continue_conversation_agent is not None:
-            agent_info = conversation.async_get_agent_info(
-                self.hass, self._conversation_data.continue_conversation_agent
-            )
-            self._conversation_data.continue_conversation_agent = None
-            if agent_info is None:
-                raise IntentRecognitionError(
-                    code="intent-agent-not-found",
-                    message=f"Intent recognition engine {self._conversation_data.continue_conversation_agent} asked for follow-up but is no longer found",
-                )
-            self._intent_agent_only = True
-        else:
-            agent_info = conversation.async_get_agent_info(
-                self.hass,
-                self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
-            )
-
-            if agent_info is None:
-                engine = self.pipeline.conversation_engine or "default"
-                raise IntentRecognitionError(
-                    code="intent-not-supported",
-                    message=f"Intent recognition engine {engine} is not found",
-                )
+        agent_info = conversation.async_get_agent_info(
+            self.hass,
+            self.pipeline.conversation_engine or conversation.HOME_ASSISTANT_AGENT,
+        )
+
+        if agent_info is None:
+            engine = self.pipeline.conversation_engine or "default"
+            raise IntentRecognitionError(
+                code="intent-not-supported",
+                message=f"Intent recognition engine {engine} is not found",
+            )

         self.intent_agent = agent_info.id

@@ -1054,7 +1031,7 @@
         conversation_extra_system_prompt: str | None,
     ) -> str:
         """Run intent recognition portion of pipeline. Returns text to speak."""
-        if self.intent_agent is None or self._conversation_data is None:
+        if self.intent_agent is None:
             raise RuntimeError("Recognize intent was not prepared")

         if self.pipeline.conversation_language == MATCH_ALL:
@@ -1101,7 +1078,7 @@
         agent_id = self.intent_agent
         processed_locally = agent_id == conversation.HOME_ASSISTANT_AGENT
         intent_response: intent.IntentResponse | None = None
-        if not processed_locally and not self._intent_agent_only:
+        if not processed_locally:
             # Sentence triggers override conversation agent
             if (
                 trigger_response_text
@@ -1126,16 +1103,12 @@
             ) & conversation.ConversationEntityFeature.CONTROL:
                 intent_filter = _async_local_fallback_intent_filter

-            # Try local intents
-            if (
-                intent_response is None
-                and self.pipeline.prefer_local_intents
-                and (
-                    intent_response := await conversation.async_handle_intents(
-                        self.hass,
-                        user_input,
-                        intent_filter=intent_filter,
-                    )
-                )
+            # Try local intents first, if preferred.
+            elif self.pipeline.prefer_local_intents and (
+                intent_response := await conversation.async_handle_intents(
+                    self.hass,
+                    user_input,
+                    intent_filter=intent_filter,
+                )
             ):
                 # Local intent matched
@@ -1218,9 +1191,6 @@
                 )
             )

-        if conversation_result.continue_conversation:
-            self._conversation_data.continue_conversation_agent = agent_id
-
         return speech

     async def prepare_text_to_speech(self) -> None:
@@ -1243,31 +1213,36 @@
             tts_options[tts.ATTR_PREFERRED_SAMPLE_BYTES] = SAMPLE_WIDTH

         try:
-            self.tts_stream = tts.async_create_stream(
-                hass=self.hass,
-                engine=engine,
-                language=self.pipeline.tts_language,
-                options=tts_options,
+            options_supported = await tts.async_support_options(
+                self.hass,
+                engine,
+                self.pipeline.tts_language,
+                tts_options,
             )
         except HomeAssistantError as err:
             raise TextToSpeechError(
                 code="tts-not-supported",
-                message=(
-                    f"Text-to-speech engine {engine} "
-                    f"does not support language {self.pipeline.tts_language} or options {tts_options}:"
-                    f" {err}"
-                ),
+                message=f"Text-to-speech engine '{engine}' not found",
             ) from err
+        if not options_supported:
+            raise TextToSpeechError(
+                code="tts-not-supported",
+                message=(
+                    f"Text-to-speech engine {engine} "
+                    f"does not support language {self.pipeline.tts_language} or options {tts_options}"
+                ),
+            )
+
+        self.tts_engine = engine
+        self.tts_options = tts_options

     async def text_to_speech(self, tts_input: str) -> None:
         """Run text-to-speech portion of pipeline."""
-        assert self.tts_stream is not None
-
         self.process_event(
             PipelineEvent(
                 PipelineEventType.TTS_START,
                 {
-                    "engine": self.tts_stream.engine,
+                    "engine": self.tts_engine,
                     "language": self.pipeline.tts_language,
                     "voice": self.pipeline.tts_voice,
                     "tts_input": tts_input,
@@ -1280,9 +1255,14 @@
             tts_media_id = tts_generate_media_source_id(
                 self.hass,
                 tts_input,
-                engine=self.tts_stream.engine,
-                language=self.tts_stream.language,
-                options=self.tts_stream.options,
+                engine=self.tts_engine,
+                language=self.pipeline.tts_language,
+                options=self.tts_options,
             )
+            tts_media = await media_source.async_resolve_media(
+                self.hass,
+                tts_media_id,
+                None,
+            )
         except Exception as src_error:
             _LOGGER.exception("Unexpected error during text-to-speech")
@@ -1291,12 +1271,10 @@
                 message="Unexpected error during text-to-speech",
             ) from src_error

-        self.tts_stream.async_set_message(tts_input)
-
+        _LOGGER.debug("TTS result %s", tts_media)
         tts_output = {
-            "url": self.tts_stream.url,
-            "mime_type": self.tts_stream.content_type,
+            "media_id": tts_media_id,
+            **asdict(tts_media),
         }

         self.process_event(
@@ -1476,8 +1454,8 @@

     run: PipelineRun

-    session: chat_session.ChatSession
-    """Session for the conversation."""
+    conversation_id: str
+    """Identifier for the conversation."""

     stt_metadata: stt.SpeechMetadata | None = None
     """Metadata of stt input audio. Required when start_stage = stt."""
@@ -1502,9 +1480,7 @@

     async def execute(self) -> None:
         """Run pipeline."""
-        self.run.start(
-            conversation_id=self.session.conversation_id, device_id=self.device_id
-        )
+        self.run.start(conversation_id=self.conversation_id, device_id=self.device_id)
         current_stage: PipelineStage | None = self.run.start_stage
         stt_audio_buffer: list[EnhancedAudioChunk] = []
         stt_processed_stream: AsyncIterable[EnhancedAudioChunk] | None = None
@@ -1588,7 +1564,7 @@
             assert intent_input is not None
             tts_input = await self.run.recognize_intent(
                 intent_input,
-                self.session.conversation_id,
+                self.conversation_id,
                 self.device_id,
                 self.conversation_extra_system_prompt,
             )
@@ -1672,7 +1648,7 @@
                 <= PIPELINE_STAGE_ORDER.index(PipelineStage.INTENT)
                 <= end_stage_index
             ):
-                prepare_tasks.append(self.run.prepare_recognize_intent(self.session))
+                prepare_tasks.append(self.run.prepare_recognize_intent())

             if (
                 start_stage_index
@@ -1951,7 +1927,7 @@


 class PipelineStore(Store[SerializedPipelineStorageCollection]):
-    """Store pipeline data."""
+    """Store entity registry data."""

     async def _async_migrate_func(
         self,
@@ -2033,37 +2009,3 @@ async def async_run_migrations(hass: HomeAssistant) -> None:

     for pipeline, attr_updates in updates:
         await async_update_pipeline(hass, pipeline, **attr_updates)
-
-
-@dataclass
-class PipelineConversationData:
-    """Hold data for the duration of a conversation."""
-
-    continue_conversation_agent: str | None = None
-    """The agent that requested the conversation to be continued."""
-
-
-@callback
-def async_get_pipeline_conversation_data(
-    hass: HomeAssistant, session: chat_session.ChatSession
-) -> PipelineConversationData:
-    """Get the pipeline data for a specific conversation."""
-    all_conversation_data = hass.data.get(KEY_PIPELINE_CONVERSATION_DATA)
-    if all_conversation_data is None:
-        all_conversation_data = {}
-        hass.data[KEY_PIPELINE_CONVERSATION_DATA] = all_conversation_data
-
-    data = all_conversation_data.get(session.conversation_id)
-
-    if data is not None:
-        return data
-
-    @callback
-    def do_cleanup() -> None:
-        """Handle cleanup."""
-        all_conversation_data.pop(session.conversation_id)
-
-    session.async_on_cleanup(do_cleanup)
-
-    data = all_conversation_data[session.conversation_id] = PipelineConversationData()
-    return data
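The removed `async_get_pipeline_conversation_data` helper is a session-scoped cache: data is created on first access for a conversation ID and evicted by a cleanup callback when the chat session ends. A self-contained sketch of that pattern, with a stand-in `Session` class instead of Home Assistant's `chat_session`:

from collections.abc import Callable
from dataclasses import dataclass, field

@dataclass
class Session:
    """Toy stand-in for a chat session with cleanup callbacks."""

    conversation_id: str
    _cleanup: list[Callable[[], None]] = field(default_factory=list)

    def async_on_cleanup(self, cb: Callable[[], None]) -> None:
        self._cleanup.append(cb)

    def close(self) -> None:
        for cb in self._cleanup:
            cb()

@dataclass
class PipelineConversationData:
    continue_conversation_agent: str | None = None

_all_data: dict[str, PipelineConversationData] = {}

def get_conversation_data(session: Session) -> PipelineConversationData:
    """Create per-conversation data on first access, reuse it afterwards."""
    if (data := _all_data.get(session.conversation_id)) is not None:
        return data
    # Drop the entry when the session ends so the dict cannot grow unbounded.
    session.async_on_cleanup(lambda: _all_data.pop(session.conversation_id))
    data = _all_data[session.conversation_id] = PipelineConversationData()
    return data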
@@ -239,7 +239,7 @@ async def websocket_run(
     with chat_session.async_get_chat_session(
         hass, msg.get("conversation_id")
     ) as session:
-        input_args["session"] = session
+        input_args["conversation_id"] = session.conversation_id
         pipeline_input = PipelineInput(**input_args)

     try:
@@ -13,11 +13,7 @@ from azure.storage.blob.aio import ContainerClient

 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import (
-    ConfigEntryAuthFailed,
-    ConfigEntryError,
-    ConfigEntryNotReady,
-)
+from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady
 from homeassistant.helpers.aiohttp_client import async_create_clientsession

 from .const import (
@@ -56,7 +52,7 @@ async def async_setup_entry(
             translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
         ) from err
     except ClientAuthenticationError as err:
-        raise ConfigEntryAuthFailed(
+        raise ConfigEntryError(
             translation_domain=DOMAIN,
             translation_key="invalid_auth",
             translation_placeholders={CONF_ACCOUNT_NAME: entry.data[CONF_ACCOUNT_NAME]},
@@ -1,6 +1,5 @@
 """Config flow for Azure Storage integration."""

-from collections.abc import Mapping
 import logging
 from typing import Any

@@ -27,26 +26,6 @@ _LOGGER = logging.getLogger(__name__)
 class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
     """Handle a config flow for azure storage."""

-    def get_account_url(self, account_name: str) -> str:
-        """Get the account URL."""
-        return f"https://{account_name}.blob.core.windows.net/"
-
-    async def validate_config(
-        self, container_client: ContainerClient
-    ) -> dict[str, str]:
-        """Validate the configuration."""
-        errors: dict[str, str] = {}
-        try:
-            await container_client.exists()
-        except ResourceNotFoundError:
-            errors["base"] = "cannot_connect"
-        except ClientAuthenticationError:
-            errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth"
-        except Exception:
-            _LOGGER.exception("Unknown exception occurred")
-            errors["base"] = "unknown"
-        return errors
-
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
     ) -> ConfigFlowResult:
@@ -59,13 +38,20 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
                 {CONF_ACCOUNT_NAME: user_input[CONF_ACCOUNT_NAME]}
             )
             container_client = ContainerClient(
-                account_url=self.get_account_url(user_input[CONF_ACCOUNT_NAME]),
+                account_url=f"https://{user_input[CONF_ACCOUNT_NAME]}.blob.core.windows.net/",
                 container_name=user_input[CONF_CONTAINER_NAME],
                 credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
                 transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
             )
-            errors = await self.validate_config(container_client)
+            try:
+                await container_client.exists()
+            except ResourceNotFoundError:
+                errors["base"] = "cannot_connect"
+            except ClientAuthenticationError:
+                errors[CONF_STORAGE_ACCOUNT_KEY] = "invalid_auth"
+            except Exception:
+                _LOGGER.exception("Unknown exception occurred")
+                errors["base"] = "unknown"
             if not errors:
                 return self.async_create_entry(
                     title=f"{user_input[CONF_ACCOUNT_NAME]}/{user_input[CONF_CONTAINER_NAME]}",
@@ -84,77 +70,3 @@ class AzureStorageConfigFlow(ConfigFlow, domain=DOMAIN):
             ),
             errors=errors,
         )
-
-    async def async_step_reauth(
-        self, entry_data: Mapping[str, Any]
-    ) -> ConfigFlowResult:
-        """Perform reauth upon an API authentication error."""
-        return await self.async_step_reauth_confirm()
-
-    async def async_step_reauth_confirm(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Confirm reauth dialog."""
-        errors: dict[str, str] = {}
-        reauth_entry = self._get_reauth_entry()
-
-        if user_input is not None:
-            container_client = ContainerClient(
-                account_url=self.get_account_url(reauth_entry.data[CONF_ACCOUNT_NAME]),
-                container_name=reauth_entry.data[CONF_CONTAINER_NAME],
-                credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
-                transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
-            )
-            errors = await self.validate_config(container_client)
-            if not errors:
-                return self.async_update_reload_and_abort(
-                    reauth_entry,
-                    data={**reauth_entry.data, **user_input},
-                )
-        return self.async_show_form(
-            step_id="reauth_confirm",
-            data_schema=vol.Schema(
-                {
-                    vol.Required(CONF_STORAGE_ACCOUNT_KEY): str,
-                }
-            ),
-            errors=errors,
-        )
-
-    async def async_step_reconfigure(
-        self, user_input: dict[str, Any] | None = None
-    ) -> ConfigFlowResult:
-        """Reconfigure the entry."""
-        errors: dict[str, str] = {}
-        reconfigure_entry = self._get_reconfigure_entry()
-
-        if user_input is not None:
-            container_client = ContainerClient(
-                account_url=self.get_account_url(
-                    reconfigure_entry.data[CONF_ACCOUNT_NAME]
-                ),
-                container_name=user_input[CONF_CONTAINER_NAME],
-                credential=user_input[CONF_STORAGE_ACCOUNT_KEY],
-                transport=AioHttpTransport(session=async_get_clientsession(self.hass)),
-            )
-            errors = await self.validate_config(container_client)
-            if not errors:
-                return self.async_update_reload_and_abort(
-                    reconfigure_entry,
-                    data={**reconfigure_entry.data, **user_input},
-                )
-        return self.async_show_form(
-            data_schema=vol.Schema(
-                {
-                    vol.Required(
-                        CONF_CONTAINER_NAME,
-                        default=reconfigure_entry.data[CONF_CONTAINER_NAME],
-                    ): str,
-                    vol.Required(
-                        CONF_STORAGE_ACCOUNT_KEY,
-                        default=reconfigure_entry.data[CONF_STORAGE_ACCOUNT_KEY],
-                    ): str,
-                }
-            ),
-            errors=errors,
-        )
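The `validate_config` method removed in this hunk is the usual config-flow refactor: probe the remote resource once, translate each failure into a form-error key, and reuse the helper across the user, reauth, and reconfigure steps. A sketch of the idea with stand-in exception and client types (none of these names are the real Azure SDK):

class ResourceNotFoundError(Exception): ...
class ClientAuthenticationError(Exception): ...

class FakeContainerClient:
    """Toy client: `ok` controls whether the probe succeeds."""

    def __init__(self, ok: bool = True) -> None:
        self.ok = ok

    async def exists(self) -> bool:
        if not self.ok:
            raise ResourceNotFoundError
        return True

async def validate_config(client: FakeContainerClient) -> dict[str, str]:
    """Probe the container and map each failure to a form-error key."""
    errors: dict[str, str] = {}
    try:
        await client.exists()
    except ResourceNotFoundError:
        errors["base"] = "cannot_connect"
    except ClientAuthenticationError:
        errors["storage_account_key"] = "invalid_auth"
    except Exception:
        errors["base"] = "unknown"
    return errors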
@@ -7,6 +7,6 @@
   "integration_type": "service",
   "iot_class": "cloud_polling",
   "loggers": ["azure-storage-blob"],
-  "quality_scale": "platinum",
+  "quality_scale": "bronze",
   "requirements": ["azure-storage-blob==12.24.0"]
 }
@@ -57,7 +57,7 @@ rules:
     status: exempt
     comment: |
       This integration does not have platforms.
-  reauthentication-flow: done
+  reauthentication-flow: todo
   test-coverage: done

   # Gold
@@ -121,7 +121,7 @@ rules:
     status: exempt
     comment: |
       This integration does not have entities.
-  reconfiguration-flow: done
+  reconfiguration-flow: todo
   repair-issues: done
   stale-devices:
     status: exempt
@@ -19,34 +19,10 @@
         },
         "description": "Set up an Azure (Blob) storage account to be used for backups.",
         "title": "Add Azure storage account"
-      },
-      "reauth_confirm": {
-        "data": {
-          "storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]"
-        },
-        "data_description": {
-          "storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]"
-        },
-        "description": "Provide a new storage account key.",
-        "title": "Reauthenticate Azure storage account"
-      },
-      "reconfigure": {
-        "data": {
-          "container_name": "[%key:component::azure_storage::config::step::user::data::container_name%]",
-          "storage_account_key": "[%key:component::azure_storage::config::step::user::data::storage_account_key%]"
-        },
-        "data_description": {
-          "container_name": "[%key:component::azure_storage::config::step::user::data_description::container_name%]",
-          "storage_account_key": "[%key:component::azure_storage::config::step::user::data_description::storage_account_key%]"
-        },
-        "description": "Change the settings of the Azure storage integration.",
-        "title": "Reconfigure Azure storage account"
-      }
+      }
     },
     "abort": {
-      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]",
-      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
-      "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
+      "already_configured": "[%key:common::config_flow::abort::already_configured_account%]"
     }
   },
   "issues": {
@@ -14,7 +14,6 @@ from itertools import chain
 import json
 from pathlib import Path, PurePath
 import shutil
-import sys
 import tarfile
 import time
 from typing import IO, TYPE_CHECKING, Any, Protocol, TypedDict, cast
@@ -309,12 +308,6 @@ class DecryptOnDowloadNotSupported(BackupManagerError):
     _message = "On-the-fly decryption is not supported for this backup."


-class BackupManagerExceptionGroup(BackupManagerError, ExceptionGroup):
-    """Raised when multiple exceptions occur."""
-
-    error_code = "multiple_errors"
-
-
 class BackupManager:
     """Define the format that backup managers can have."""

@@ -1612,24 +1605,10 @@ class CoreBackupReaderWriter(BackupReaderWriter):
             )
         finally:
             # Inform integrations the backup is done
-            # If there's an unhandled exception, we keep it so we can rethrow it in case
-            # the post backup actions also fail.
-            unhandled_exc = sys.exception()
             try:
-                try:
-                    await manager.async_post_backup_actions()
-                except BackupManagerError as err:
-                    raise BackupReaderWriterError(str(err)) from err
-            except Exception as err:
-                if not unhandled_exc:
-                    raise
-                # If there's an unhandled exception, we wrap both that and the exception
-                # from the post backup actions in an ExceptionGroup so the caller is
-                # aware of both exceptions.
-                raise BackupManagerExceptionGroup(
-                    f"Multiple errors when creating backup: {unhandled_exc}, {err}",
-                    [unhandled_exc, err],
-                ) from None
+                await manager.async_post_backup_actions()
+            except BackupManagerError as err:
+                raise BackupReaderWriterError(str(err)) from err

     def _mkdir_and_generate_backup_contents(
         self,
@@ -1641,13 +1620,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
         """Generate backup contents and return the size."""
         if not tar_file_path:
             tar_file_path = self.temp_backup_dir / f"{backup_data['slug']}.tar"
-        try:
-            make_backup_dir(tar_file_path.parent)
-        except OSError as err:
-            raise BackupReaderWriterError(
-                f"Failed to create dir {tar_file_path.parent}: "
-                f"{err} ({err.__class__.__name__})"
-            ) from err
+        make_backup_dir(tar_file_path.parent)

         excludes = EXCLUDE_FROM_BACKUP
         if not database_included:
@@ -1685,14 +1658,7 @@ class CoreBackupReaderWriter(BackupReaderWriter):
             file_filter=is_excluded_by_filter,
             arcname="data",
         )
-        try:
-            stat_result = tar_file_path.stat()
-        except OSError as err:
-            raise BackupReaderWriterError(
-                f"Error getting size of {tar_file_path}: "
-                f"{err} ({err.__class__.__name__})"
-            ) from err
-        return (tar_file_path, stat_result.st_size)
+        return (tar_file_path, tar_file_path.stat().st_size)

     async def async_receive_backup(
         self,
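The removed `finally` logic preserves an in-flight exception with `sys.exception()` and, if the post-backup actions then fail too, surfaces both failures via an exception group (Python 3.11+). A condensed sketch of the pattern, with illustrative names:

import sys

async def run_with_post_actions(work, post_actions):
    """Run work(), always run post_actions(), and raise both failures together."""
    try:
        return await work()
    finally:
        # Exception currently propagating out of work(), if any.
        unhandled_exc = sys.exception()
        try:
            await post_actions()
        except Exception as err:
            if not unhandled_exc:
                raise
            # Wrap both failures so the caller sees each of them.
            raise ExceptionGroup(
                f"Multiple errors: {unhandled_exc}, {err}",
                [unhandled_exc, err],
            ) from None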
@@ -21,7 +21,6 @@ _LOGGER = logging.getLogger(__name__)
 PLATFORMS = [
     Platform.BINARY_SENSOR,
     Platform.CLIMATE,
-    Platform.EVENT,
     Platform.FAN,
     Platform.LIGHT,
     Platform.SELECT,
@@ -29,6 +28,7 @@ PLATFORMS = [
     Platform.TIME,
 ]

+
 KEEP_ALIVE_INTERVAL = timedelta(minutes=1)
 SYNC_TIME_INTERVAL = timedelta(hours=1)
@@ -1,91 +0,0 @@
-"""Support for Balboa events."""
-
-from __future__ import annotations
-
-from datetime import datetime, timedelta
-
-from pybalboa import EVENT_UPDATE, SpaClient
-
-from homeassistant.components.event import EventEntity
-from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
-from homeassistant.helpers.event import async_track_time_interval
-
-from . import BalboaConfigEntry
-from .entity import BalboaEntity
-
-FAULT = "fault"
-FAULT_DATE = "fault_date"
-REQUEST_FAULT_LOG_INTERVAL = timedelta(minutes=5)
-
-FAULT_MESSAGE_CODE_MAP: dict[int, str] = {
-    15: "sensor_out_of_sync",
-    16: "low_flow",
-    17: "flow_failed",
-    18: "settings_reset",
-    19: "priming_mode",
-    20: "clock_failed",
-    21: "settings_reset",
-    22: "memory_failure",
-    26: "service_sensor_sync",
-    27: "heater_dry",
-    28: "heater_may_be_dry",
-    29: "water_too_hot",
-    30: "heater_too_hot",
-    31: "sensor_a_fault",
-    32: "sensor_b_fault",
-    34: "pump_stuck",
-    35: "hot_fault",
-    36: "gfci_test_failed",
-    37: "standby_mode",
-}
-FAULT_EVENT_TYPES = sorted(set(FAULT_MESSAGE_CODE_MAP.values()))
-
-
-async def async_setup_entry(
-    hass: HomeAssistant,
-    entry: BalboaConfigEntry,
-    async_add_entities: AddConfigEntryEntitiesCallback,
-) -> None:
-    """Set up the spa's events."""
-    async_add_entities([BalboaEventEntity(entry.runtime_data)])
-
-
-class BalboaEventEntity(BalboaEntity, EventEntity):
-    """Representation of a Balboa event entity."""
-
-    _attr_event_types = FAULT_EVENT_TYPES
-    _attr_translation_key = FAULT
-
-    def __init__(self, spa: SpaClient) -> None:
-        """Initialize a Balboa event entity."""
-        super().__init__(spa, FAULT)
-
-    @callback
-    def _async_handle_event(self) -> None:
-        """Handle the fault event."""
-        if not (fault := self._client.fault):
-            return
-        fault_date = fault.fault_datetime.isoformat()
-        if self.state_attributes.get(FAULT_DATE) != fault_date:
-            self._trigger_event(
-                FAULT_MESSAGE_CODE_MAP.get(fault.message_code, fault.message),
-                {FAULT_DATE: fault_date, "code": fault.message_code},
-            )
-            self.async_write_ha_state()
-
-    async def async_added_to_hass(self) -> None:
-        """Run when entity about to be added to hass."""
-        await super().async_added_to_hass()
-        self.async_on_remove(self._client.on(EVENT_UPDATE, self._async_handle_event))
-
-        async def request_fault_log(now: datetime | None = None) -> None:
-            """Request the most recent fault log."""
-            await self._client.request_fault_log()
-
-        await request_fault_log()
-        self.async_on_remove(
-            async_track_time_interval(
-                self.hass, request_fault_log, REQUEST_FAULT_LOG_INTERVAL
-            )
-        )
@@ -57,35 +57,6 @@
       }
     }
   },
-  "event": {
-    "fault": {
-      "name": "Fault",
-      "state_attributes": {
-        "event_type": {
-          "state": {
-            "sensor_out_of_sync": "Sensors are out of sync",
-            "low_flow": "The water flow is low",
-            "flow_failed": "The water flow has failed",
-            "settings_reset": "The settings have been reset",
-            "priming_mode": "Priming mode",
-            "clock_failed": "The clock has failed",
-            "memory_failure": "Program memory failure",
-            "service_sensor_sync": "Sensors are out of sync -- call for service",
-            "heater_dry": "The heater is dry",
-            "heater_may_be_dry": "The heater may be dry",
-            "water_too_hot": "The water is too hot",
-            "heater_too_hot": "The heater is too hot",
-            "sensor_a_fault": "Sensor A fault",
-            "sensor_b_fault": "Sensor B fault",
-            "pump_stuck": "A pump may be stuck on",
-            "hot_fault": "Hot fault",
-            "gfci_test_failed": "The GFCI test failed",
-            "standby_mode": "Standby mode (hold mode)"
-          }
-        }
-      }
-    }
-  },
   "fan": {
     "pump": {
       "name": "Pump {index}"
@@ -311,24 +311,11 @@ async def async_update_device(
     update the device with the new location so they can
     figure out where the adapter is.
     """
-    address = details[ADAPTER_ADDRESS]
-    connections = {(dr.CONNECTION_BLUETOOTH, address)}
     device_registry = dr.async_get(hass)
-    # We only have one device for the config entry
-    # so if the address has been corrected, make
-    # sure the device entry reflects the correct
-    # address
-    for device in dr.async_entries_for_config_entry(device_registry, entry.entry_id):
-        for conn_type, conn_value in device.connections:
-            if conn_type == dr.CONNECTION_BLUETOOTH and conn_value != address:
-                device_registry.async_update_device(
-                    device.id, new_connections=connections
-                )
-                break
     device_entry = device_registry.async_get_or_create(
         config_entry_id=entry.entry_id,
-        name=adapter_human_name(adapter, address),
-        connections=connections,
+        name=adapter_human_name(adapter, details[ADAPTER_ADDRESS]),
+        connections={(dr.CONNECTION_BLUETOOTH, details[ADAPTER_ADDRESS])},
         manufacturer=details[ADAPTER_MANUFACTURER],
         model=adapter_model(details),
         sw_version=details.get(ADAPTER_SW_VERSION),
@@ -355,9 +342,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
             )
         )
     )
     return True
     address = entry.unique_id
-    assert address is not None
+    assert source_entry is not None
     source_domain = entry.data[CONF_SOURCE_DOMAIN]
     if mac_manufacturer := await get_manufacturer_from_mac(address):
         manufacturer = f"{mac_manufacturer} ({source_domain})"
@@ -186,28 +186,16 @@ class BluetoothConfigFlow(ConfigFlow, domain=DOMAIN):
         """Handle a flow initialized by an external scanner."""
         source = user_input[CONF_SOURCE]
         await self.async_set_unique_id(source)
-        source_config_entry_id = user_input[CONF_SOURCE_CONFIG_ENTRY_ID]
         data = {
             CONF_SOURCE: source,
             CONF_SOURCE_MODEL: user_input[CONF_SOURCE_MODEL],
             CONF_SOURCE_DOMAIN: user_input[CONF_SOURCE_DOMAIN],
-            CONF_SOURCE_CONFIG_ENTRY_ID: source_config_entry_id,
+            CONF_SOURCE_CONFIG_ENTRY_ID: user_input[CONF_SOURCE_CONFIG_ENTRY_ID],
             CONF_SOURCE_DEVICE_ID: user_input[CONF_SOURCE_DEVICE_ID],
         }
         self._abort_if_unique_id_configured(updates=data)
-        for entry in self._async_current_entries(include_ignore=False):
-            # If the mac address needs to be corrected, migrate
-            # the config entry to the new mac address
-            if (
-                entry.data.get(CONF_SOURCE_CONFIG_ENTRY_ID) == source_config_entry_id
-                and entry.unique_id != source
-            ):
-                self.hass.config_entries.async_update_entry(
-                    entry, unique_id=source, data={**entry.data, **data}
-                )
-                self.hass.config_entries.async_schedule_reload(entry.entry_id)
-                return self.async_abort(reason="already_configured")
-        scanner = get_manager().async_scanner_by_source(source)
+        manager = get_manager()
+        scanner = manager.async_scanner_by_source(source)
         assert scanner is not None
         return self.async_create_entry(title=scanner.name, data=data)
@@ -21,6 +21,6 @@
     "bluetooth-auto-recovery==1.4.4",
     "bluetooth-data-tools==1.23.4",
     "dbus-fast==2.33.0",
-    "habluetooth==3.24.1"
+    "habluetooth==3.24.0"
   ]
 }
|
@@ -138,8 +138,6 @@ class WebDavTodoListEntity(TodoListEntity):
|
||||
await self.hass.async_add_executor_job(
|
||||
partial(self._calendar.save_todo, **item_data),
|
||||
)
|
||||
# refreshing async otherwise it would take too much time
|
||||
self.hass.async_create_task(self.async_update_ha_state(force_refresh=True))
|
||||
except (requests.ConnectionError, DAVError) as err:
|
||||
raise HomeAssistantError(f"CalDAV save error: {err}") from err
|
||||
|
||||
@@ -174,8 +172,6 @@ class WebDavTodoListEntity(TodoListEntity):
|
||||
obj_type="todo",
|
||||
),
|
||||
)
|
||||
# refreshing async otherwise it would take too much time
|
||||
self.hass.async_create_task(self.async_update_ha_state(force_refresh=True))
|
||||
except (requests.ConnectionError, DAVError) as err:
|
||||
raise HomeAssistantError(f"CalDAV save error: {err}") from err
|
||||
|
||||
@@ -199,5 +195,3 @@ class WebDavTodoListEntity(TodoListEntity):
|
||||
await self.hass.async_add_executor_job(item.delete)
|
||||
except (requests.ConnectionError, DAVError) as err:
|
||||
raise HomeAssistantError(f"CalDAV delete error: {err}") from err
|
||||
# refreshing async otherwise it would take too much time
|
||||
self.hass.async_create_task(self.async_update_ha_state(force_refresh=True))
|
||||
|
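The removed lines implement a fire-and-forget refresh: instead of awaiting a slow CalDAV round-trip inside the service call, the state refresh is scheduled as a background task. Sketched with plain asyncio and a toy entity, not the integration's API:

import asyncio

class TodoList:
    async def _refresh(self) -> None:
        # Stand-in for a slow CalDAV round-trip.
        await asyncio.sleep(1.0)

    async def async_create_todo_item(self) -> None:
        # ... write the new item to the server ...
        # Schedule the refresh instead of awaiting it, so this call returns
        # immediately. Keep a reference: bare tasks can be garbage-collected
        # (Home Assistant's async_create_task tracks them for you).
        self._refresh_task = asyncio.create_task(self._refresh())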
@@ -68,6 +68,7 @@ from .const import (  # noqa: F401
     FAN_ON,
     FAN_TOP,
     HVAC_MODES,
+    INTENT_GET_TEMPERATURE,
     INTENT_SET_TEMPERATURE,
     PRESET_ACTIVITY,
     PRESET_AWAY,
@@ -126,6 +126,7 @@ DEFAULT_MAX_HUMIDITY = 99

 DOMAIN = "climate"

+INTENT_GET_TEMPERATURE = "HassClimateGetTemperature"
 INTENT_SET_TEMPERATURE = "HassClimateSetTemperature"

 SERVICE_SET_AUX_HEAT = "set_aux_heat"
@@ -1,4 +1,4 @@
-"""Intents for the climate integration."""
+"""Intents for the client integration."""

 from __future__ import annotations

@@ -11,6 +11,7 @@ from homeassistant.helpers import config_validation as cv, intent
 from . import (
     ATTR_TEMPERATURE,
     DOMAIN,
+    INTENT_GET_TEMPERATURE,
     INTENT_SET_TEMPERATURE,
     SERVICE_SET_TEMPERATURE,
     ClimateEntityFeature,
@@ -19,9 +20,49 @@ from . import (

 async def async_setup_intents(hass: HomeAssistant) -> None:
     """Set up the climate intents."""
+    intent.async_register(hass, GetTemperatureIntent())
     intent.async_register(hass, SetTemperatureIntent())


+class GetTemperatureIntent(intent.IntentHandler):
+    """Handle GetTemperature intents."""
+
+    intent_type = INTENT_GET_TEMPERATURE
+    description = "Gets the current temperature of a climate device or entity"
+    slot_schema = {
+        vol.Optional("area"): intent.non_empty_string,
+        vol.Optional("name"): intent.non_empty_string,
+    }
+    platforms = {DOMAIN}
+
+    async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
+        """Handle the intent."""
+        hass = intent_obj.hass
+        slots = self.async_validate_slots(intent_obj.slots)
+
+        name: str | None = None
+        if "name" in slots:
+            name = slots["name"]["value"]
+
+        area: str | None = None
+        if "area" in slots:
+            area = slots["area"]["value"]
+
+        match_constraints = intent.MatchTargetsConstraints(
+            name=name, area_name=area, domains=[DOMAIN], assistant=intent_obj.assistant
+        )
+        match_result = intent.async_match_targets(hass, match_constraints)
+        if not match_result.is_match:
+            raise intent.MatchFailedError(
+                result=match_result, constraints=match_constraints
+            )
+
+        response = intent_obj.create_response()
+        response.response_type = intent.IntentResponseType.QUERY_ANSWER
+        response.async_set_states(matched_states=match_result.states)
+        return response
+
+
 class SetTemperatureIntent(intent.IntentHandler):
     """Handle SetTemperature intents."""
@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"loggers": ["aiocomelit"],
"requirements": ["aiocomelit==0.11.1"]
"requirements": ["aiocomelit==0.10.1"]
}

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/conversation",
"integration_type": "system",
"quality_scale": "internal",
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.26"]
"requirements": ["hassil==2.2.3", "home-assistant-intents==2025.2.5"]
}

@@ -62,14 +62,12 @@ class ConversationResult:

response: intent.IntentResponse
conversation_id: str | None = None
continue_conversation: bool = False

def as_dict(self) -> dict[str, Any]:
"""Return result as a dict."""
return {
"response": self.response.as_dict(),
"conversation_id": self.conversation_id,
"continue_conversation": self.continue_conversation,
}


@@ -48,7 +48,6 @@ COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM = [
Platform.TIME,
Platform.UPDATE,
Platform.VACUUM,
Platform.VALVE,
Platform.WATER_HEATER,
Platform.WEATHER,
]

@@ -1,89 +0,0 @@
"""Demo valve platform that implements valves."""

from __future__ import annotations

import asyncio
from typing import Any

from homeassistant.components.valve import ValveEntity, ValveEntityFeature, ValveState
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

OPEN_CLOSE_DELAY = 2 # Used to give a realistic open/close experience in frontend


async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Demo config entry."""
async_add_entities(
[
DemoValve("Front Garden", ValveState.OPEN),
DemoValve("Orchard", ValveState.CLOSED),
]
)


class DemoValve(ValveEntity):
"""Representation of a Demo valve."""

_attr_should_poll = False

def __init__(
self,
name: str,
state: str,
moveable: bool = True,
) -> None:
"""Initialize the valve."""
self._attr_name = name
if moveable:
self._attr_supported_features = (
ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE
)
self._state = state
self._moveable = moveable

@property
def is_open(self) -> bool:
"""Return true if valve is open."""
return self._state == ValveState.OPEN

@property
def is_opening(self) -> bool:
"""Return true if valve is opening."""
return self._state == ValveState.OPENING

@property
def is_closing(self) -> bool:
"""Return true if valve is closing."""
return self._state == ValveState.CLOSING

@property
def is_closed(self) -> bool:
"""Return true if valve is closed."""
return self._state == ValveState.CLOSED

@property
def reports_position(self) -> bool:
"""Return True if entity reports position, False otherwise."""
return False

async def async_open_valve(self, **kwargs: Any) -> None:
"""Open the valve."""
self._state = ValveState.OPENING
self.async_write_ha_state()
await asyncio.sleep(OPEN_CLOSE_DELAY)
self._state = ValveState.OPEN
self.async_write_ha_state()

async def async_close_valve(self, **kwargs: Any) -> None:
"""Close the valve."""
self._state = ValveState.CLOSING
self.async_write_ha_state()
await asyncio.sleep(OPEN_CLOSE_DELAY)
self._state = ValveState.CLOSED
self.async_write_ha_state()

@@ -24,14 +24,7 @@ from homeassistant.const import (
STATE_UNKNOWN,
UnitOfTime,
)
from homeassistant.core import (
Event,
EventStateChangedData,
EventStateReportedData,
HomeAssistant,
State,
callback,
)
from homeassistant.core import Event, EventStateChangedData, HomeAssistant, callback
from homeassistant.helpers import config_validation as cv, entity_registry as er
from homeassistant.helpers.device import async_device_info_to_link_from_entity
from homeassistant.helpers.device_registry import DeviceInfo
@@ -39,10 +32,7 @@ from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,
)
from homeassistant.helpers.event import (
async_track_state_change_event,
async_track_state_report_event,
)
from homeassistant.helpers.event import async_track_state_change_event
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from .const import (
@@ -210,33 +200,13 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
_LOGGER.warning("Could not restore last state: %s", err)

@callback
def on_state_reported(event: Event[EventStateReportedData]) -> None:
"""Handle constant sensor state."""
if self._attr_native_value == Decimal(0):
# If the derivative is zero, and the source sensor hasn't
# changed state, then we know it will still be zero.
return
new_state = event.data["new_state"]
if new_state is not None:
calc_derivative(
new_state, new_state.state, event.data["old_last_reported"]
)

@callback
def on_state_changed(event: Event[EventStateChangedData]) -> None:
"""Handle changed sensor state."""
new_state = event.data["new_state"]
old_state = event.data["old_state"]
if new_state is not None and old_state is not None:
calc_derivative(new_state, old_state.state, old_state.last_reported)

def calc_derivative(
new_state: State, old_value: str, old_last_reported: datetime
) -> None:
def calc_derivative(event: Event[EventStateChangedData]) -> None:
"""Handle the sensor state changes."""
if old_value in (STATE_UNKNOWN, STATE_UNAVAILABLE) or new_state.state in (
STATE_UNKNOWN,
STATE_UNAVAILABLE,
if (
(old_state := event.data["old_state"]) is None
or old_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE)
or (new_state := event.data["new_state"]) is None
or new_state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE)
):
return

@@ -250,15 +220,15 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
self._state_list = [
(time_start, time_end, state)
for time_start, time_end, state in self._state_list
if (new_state.last_reported - time_end).total_seconds()
if (new_state.last_updated - time_end).total_seconds()
< self._time_window
]

try:
elapsed_time = (
new_state.last_reported - old_last_reported
new_state.last_updated - old_state.last_updated
).total_seconds()
delta_value = Decimal(new_state.state) - Decimal(old_value)
delta_value = Decimal(new_state.state) - Decimal(old_state.state)
new_derivative = (
delta_value
/ Decimal(elapsed_time)
@@ -270,7 +240,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
_LOGGER.warning("While calculating derivative: %s", err)
except DecimalException as err:
_LOGGER.warning(
"Invalid state (%s > %s): %s", old_value, new_state.state, err
"Invalid state (%s > %s): %s", old_state.state, new_state.state, err
)
except AssertionError as err:
_LOGGER.error("Could not calculate derivative: %s", err)
@@ -287,7 +257,7 @@ class DerivativeSensor(RestoreSensor, SensorEntity):

# add latest derivative to the window list
self._state_list.append(
(old_last_reported, new_state.last_reported, new_derivative)
(old_state.last_updated, new_state.last_updated, new_derivative)
)

def calculate_weight(
@@ -307,19 +277,13 @@ class DerivativeSensor(RestoreSensor, SensorEntity):
else:
derivative = Decimal("0.00")
for start, end, value in self._state_list:
weight = calculate_weight(start, end, new_state.last_reported)
weight = calculate_weight(start, end, new_state.last_updated)
derivative = derivative + (value * Decimal(weight))
self._attr_native_value = round(derivative, self._round_digits)
self.async_write_ha_state()

self.async_on_remove(
async_track_state_change_event(
self.hass, self._sensor_source_id, on_state_changed
)
)

self.async_on_remove(
async_track_state_report_event(
self.hass, self._sensor_source_id, on_state_reported
self.hass, self._sensor_source_id, calc_derivative
)
)

@@ -8,7 +8,6 @@ from devolo_plc_api.device_api import (
WifiGuestAccessGet,
)
from devolo_plc_api.plcnet_api import DataRate, LogicalNetwork
from yarl import URL

from homeassistant.const import ATTR_CONNECTIONS
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
@@ -44,7 +43,7 @@ class DevoloEntity(Entity):
self.entry = entry

self._attr_device_info = DeviceInfo(
configuration_url=URL.build(scheme="http", host=self.device.ip),
configuration_url=f"http://{self.device.ip}",
identifiers={(DOMAIN, str(self.device.serial_number))},
manufacturer="devolo",
model=self.device.product,

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/ecovacs",
"iot_class": "cloud_push",
"loggers": ["sleekxmppfs", "sucks", "deebot_client"],
"requirements": ["py-sucks==0.9.10", "deebot-client==12.3.1"]
"requirements": ["py-sucks==0.9.10", "deebot-client==12.2.0"]
}

@@ -105,7 +105,6 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity):
AlarmControlPanelEntityFeature.ARM_HOME
| AlarmControlPanelEntityFeature.ARM_AWAY
| AlarmControlPanelEntityFeature.ARM_NIGHT
| AlarmControlPanelEntityFeature.ARM_VACATION
)
_element: Area

@@ -205,7 +204,7 @@ class ElkArea(ElkAttachedEntity, AlarmControlPanelEntity, RestoreEntity):
ArmedStatus.ARMED_STAY_INSTANT: AlarmControlPanelState.ARMED_HOME,
ArmedStatus.ARMED_TO_NIGHT: AlarmControlPanelState.ARMED_NIGHT,
ArmedStatus.ARMED_TO_NIGHT_INSTANT: AlarmControlPanelState.ARMED_NIGHT,
ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_VACATION,
ArmedStatus.ARMED_TO_VACATION: AlarmControlPanelState.ARMED_AWAY,
}

if self._element.alarm_state is None:

@@ -6,5 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/environment_canada",
"iot_class": "cloud_polling",
"loggers": ["env_canada"],
"requirements": ["env-canada==0.8.0"]
"requirements": ["env-canada==0.7.2"]
}

@@ -22,5 +22,5 @@
"integration_type": "device",
"iot_class": "local_polling",
"loggers": ["eq3btsmart"],
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.9.0"]
"requirements": ["eq3btsmart==1.4.1", "bleak-esphome==2.7.1"]
}

@@ -16,7 +16,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.typing import ConfigType

from .const import CONF_BLUETOOTH_MAC_ADDRESS, CONF_NOISE_PSK, DATA_FFMPEG_PROXY, DOMAIN
from .const import CONF_NOISE_PSK, DATA_FFMPEG_PROXY, DOMAIN
from .dashboard import async_setup as async_setup_dashboard
from .domain_data import DomainData

@@ -87,6 +87,6 @@ async def async_unload_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) ->

async def async_remove_entry(hass: HomeAssistant, entry: ESPHomeConfigEntry) -> None:
"""Remove an esphome config entry."""
if bluetooth_mac_address := entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS):
async_remove_scanner(hass, bluetooth_mac_address.upper())
if mac_address := entry.unique_id:
async_remove_scanner(hass, mac_address.upper())
await DomainData.get(hass).get_or_create_store(hass, entry).async_remove()

@@ -284,10 +284,7 @@ class EsphomeAssistSatellite(
elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_INTENT_END:
assert event.data is not None
data_to_send = {
"conversation_id": event.data["intent_output"]["conversation_id"],
"continue_conversation": str(
int(event.data["intent_output"]["continue_conversation"])
),
"conversation_id": event.data["intent_output"]["conversation_id"] or "",
}
elif event_type == VoiceAssistantEventType.VOICE_ASSISTANT_TTS_START:
assert event.data is not None

@@ -3,7 +3,6 @@
from __future__ import annotations

from functools import partial
from math import isfinite
from typing import Any, cast

from aioesphomeapi import (
@@ -239,13 +238,9 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti
@esphome_state_property
def current_humidity(self) -> int | None:
"""Return the current humidity."""
if (
not self._static_info.supports_current_humidity
or (val := self._state.current_humidity) is None
or not isfinite(val)
):
if not self._static_info.supports_current_humidity:
return None
return round(val)
return round(self._state.current_humidity)

@property
@esphome_float_state_property

@@ -41,7 +41,6 @@ from .const import (
CONF_ALLOW_SERVICE_CALLS,
CONF_DEVICE_NAME,
CONF_NOISE_PSK,
CONF_SUBSCRIBE_LOGS,
DEFAULT_ALLOW_SERVICE_CALLS,
DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS,
DOMAIN,
@@ -509,10 +508,6 @@ class OptionsFlowHandler(OptionsFlow):
CONF_ALLOW_SERVICE_CALLS, DEFAULT_ALLOW_SERVICE_CALLS
),
): bool,
vol.Required(
CONF_SUBSCRIBE_LOGS,
default=self.config_entry.options.get(CONF_SUBSCRIBE_LOGS, False),
): bool,
}
)
return self.async_show_form(step_id="init", data_schema=data_schema)

@@ -5,22 +5,18 @@ from awesomeversion import AwesomeVersion
DOMAIN = "esphome"

CONF_ALLOW_SERVICE_CALLS = "allow_service_calls"
CONF_SUBSCRIBE_LOGS = "subscribe_logs"
CONF_DEVICE_NAME = "device_name"
CONF_NOISE_PSK = "noise_psk"
CONF_BLUETOOTH_MAC_ADDRESS = "bluetooth_mac_address"

DEFAULT_ALLOW_SERVICE_CALLS = True
DEFAULT_NEW_CONFIG_ALLOW_ALLOW_SERVICE_CALLS = False


STABLE_BLE_VERSION_STR = "2025.2.1"
STABLE_BLE_VERSION_STR = "2023.8.0"
STABLE_BLE_VERSION = AwesomeVersion(STABLE_BLE_VERSION_STR)
PROJECT_URLS = {
"esphome.bluetooth-proxy": "https://esphome.github.io/bluetooth-proxies/",
}
# ESPHome always uses .0 for the changelog URL
STABLE_BLE_URL_VERSION = f"{STABLE_BLE_VERSION.major}.{STABLE_BLE_VERSION.minor}.0"
DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_URL_VERSION}.html"
DEFAULT_URL = f"https://esphome.io/changelog/{STABLE_BLE_VERSION_STR}.html"

DATA_FFMPEG_PROXY = f"{DOMAIN}.ffmpeg_proxy"

@@ -13,7 +13,9 @@ from . import CONF_NOISE_PSK
from .dashboard import async_get_dashboard
from .entry_data import ESPHomeConfigEntry

REDACT_KEYS = {CONF_NOISE_PSK, CONF_PASSWORD, "mac_address", "bluetooth_mac_address"}
CONF_MAC_ADDRESS = "mac_address"

REDACT_KEYS = {CONF_NOISE_PSK, CONF_PASSWORD, CONF_MAC_ADDRESS}


async def async_get_config_entry_diagnostics(
@@ -25,17 +27,13 @@ async def async_get_config_entry_diagnostics(
diag["config"] = config_entry.as_dict()

entry_data = config_entry.runtime_data
device_info = entry_data.device_info

if (storage_data := await entry_data.store.async_load()) is not None:
diag["storage_data"] = storage_data

if (
device_info
and (
scanner_mac := device_info.bluetooth_mac_address or device_info.mac_address
)
and (scanner := async_scanner_by_source(hass, scanner_mac.upper()))
config_entry.unique_id
and (scanner := async_scanner_by_source(hass, config_entry.unique_id.upper()))
and (bluetooth_device := entry_data.bluetooth_device)
):
diag["bluetooth"] = {

@@ -5,7 +5,6 @@ from __future__ import annotations
import asyncio
from functools import partial
import logging
import re
from typing import TYPE_CHECKING, Any, NamedTuple

from aioesphomeapi import (
@@ -17,7 +16,6 @@ from aioesphomeapi import (
HomeassistantServiceCall,
InvalidAuthAPIError,
InvalidEncryptionKeyAPIError,
LogLevel,
ReconnectLogic,
RequiresEncryptionAPIError,
UserService,
@@ -35,7 +33,6 @@ from homeassistant.const import (
Platform,
)
from homeassistant.core import (
CALLBACK_TYPE,
Event,
EventStateChangedData,
HomeAssistant,
@@ -63,9 +60,7 @@ from homeassistant.util.async_ import create_eager_task
from .bluetooth import async_connect_scanner
from .const import (
CONF_ALLOW_SERVICE_CALLS,
CONF_BLUETOOTH_MAC_ADDRESS,
CONF_DEVICE_NAME,
CONF_SUBSCRIBE_LOGS,
DEFAULT_ALLOW_SERVICE_CALLS,
DEFAULT_URL,
DOMAIN,
@@ -79,38 +74,8 @@ from .domain_data import DomainData
# Import config flow so that it's added to the registry
from .entry_data import ESPHomeConfigEntry, RuntimeEntryData

if TYPE_CHECKING:
from aioesphomeapi.api_pb2 import ( # type: ignore[attr-defined]
SubscribeLogsResponse,
)


_LOGGER = logging.getLogger(__name__)

LOG_LEVEL_TO_LOGGER = {
LogLevel.LOG_LEVEL_NONE: logging.DEBUG,
LogLevel.LOG_LEVEL_ERROR: logging.ERROR,
LogLevel.LOG_LEVEL_WARN: logging.WARNING,
LogLevel.LOG_LEVEL_INFO: logging.INFO,
LogLevel.LOG_LEVEL_CONFIG: logging.INFO,
LogLevel.LOG_LEVEL_DEBUG: logging.DEBUG,
LogLevel.LOG_LEVEL_VERBOSE: logging.DEBUG,
LogLevel.LOG_LEVEL_VERY_VERBOSE: logging.DEBUG,
}
LOGGER_TO_LOG_LEVEL = {
logging.NOTSET: LogLevel.LOG_LEVEL_VERY_VERBOSE,
logging.DEBUG: LogLevel.LOG_LEVEL_VERY_VERBOSE,
logging.INFO: LogLevel.LOG_LEVEL_CONFIG,
logging.WARNING: LogLevel.LOG_LEVEL_WARN,
logging.ERROR: LogLevel.LOG_LEVEL_ERROR,
logging.CRITICAL: LogLevel.LOG_LEVEL_ERROR,
}
# 7-bit and 8-bit C1 ANSI sequences
# https://stackoverflow.com/questions/14693701/how-can-i-remove-the-ansi-escape-sequences-from-a-string-in-python
ANSI_ESCAPE_78BIT = re.compile(
rb"(?:\x1B[@-Z\\-_]|[\x80-\x9A\x9C-\x9F]|(?:\x1B\[|\x9B)[0-?]*[ -/]*[@-~])"
)


@callback
def _async_check_firmware_version(

@@ -171,8 +136,6 @@ class ESPHomeManager:
"""Class to manage an ESPHome connection."""

__slots__ = (
"_cancel_subscribe_logs",
"_log_level",
"cli",
"device_id",
"domain_data",
@@ -206,8 +169,6 @@ class ESPHomeManager:
self.reconnect_logic: ReconnectLogic | None = None
self.zeroconf_instance = zeroconf_instance
self.entry_data = entry.runtime_data
self._cancel_subscribe_logs: CALLBACK_TYPE | None = None
self._log_level = LogLevel.LOG_LEVEL_NONE

async def on_stop(self, event: Event) -> None:
"""Cleanup the socket client on HA close."""
@@ -380,34 +341,6 @@ class ESPHomeManager:
# Re-connection logic will trigger after this
await self.cli.disconnect()

def _async_on_log(self, msg: SubscribeLogsResponse) -> None:
"""Handle a log message from the API."""
log: bytes = msg.message
_LOGGER.log(
LOG_LEVEL_TO_LOGGER.get(msg.level, logging.DEBUG),
"%s: %s",
self.entry.title,
ANSI_ESCAPE_78BIT.sub(b"", log).decode("utf-8", "backslashreplace"),
)

@callback
def _async_get_equivalent_log_level(self) -> LogLevel:
"""Get the equivalent ESPHome log level for the current logger."""
return LOGGER_TO_LOG_LEVEL.get(
_LOGGER.getEffectiveLevel(), LogLevel.LOG_LEVEL_VERY_VERBOSE
)

@callback
def _async_subscribe_logs(self, log_level: LogLevel) -> None:
"""Subscribe to logs."""
if self._cancel_subscribe_logs is not None:
self._cancel_subscribe_logs()
self._cancel_subscribe_logs = None
self._log_level = log_level
self._cancel_subscribe_logs = self.cli.subscribe_logs(
self._async_on_log, self._log_level
)

async def _on_connnect(self) -> None:
"""Subscribe to states and list entities on successful API login."""
entry = self.entry

@@ -419,8 +352,6 @@ class ESPHomeManager:
cli = self.cli
stored_device_name = entry.data.get(CONF_DEVICE_NAME)
unique_id_is_mac_address = unique_id and ":" in unique_id
if entry.options.get(CONF_SUBSCRIBE_LOGS):
self._async_subscribe_logs(self._async_get_equivalent_log_level())
results = await asyncio.gather(
create_eager_task(cli.device_info()),
create_eager_task(cli.list_entities_services()),
@@ -432,13 +363,6 @@ class ESPHomeManager:

device_mac = format_mac(device_info.mac_address)
mac_address_matches = unique_id == device_mac
if (
bluetooth_mac_address := device_info.bluetooth_mac_address
) and entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS) != bluetooth_mac_address:
hass.config_entries.async_update_entry(
entry,
data={**entry.data, CONF_BLUETOOTH_MAC_ADDRESS: bluetooth_mac_address},
)
#
# Migrate config entry to new unique ID if the current
# unique id is not a mac address.
@@ -506,9 +430,7 @@ class ESPHomeManager:
)
)
else:
bluetooth.async_remove_scanner(
hass, device_info.bluetooth_mac_address or device_info.mac_address
)
bluetooth.async_remove_scanner(hass, device_info.mac_address)

if device_info.voice_assistant_feature_flags_compat(api_version) and (
Platform.ASSIST_SATELLITE not in entry_data.loaded_platforms
@@ -581,10 +503,6 @@ class ESPHomeManager:
def _async_handle_logging_changed(self, _event: Event) -> None:
"""Handle when the logging level changes."""
self.cli.set_debug(_LOGGER.isEnabledFor(logging.DEBUG))
if self.entry.options.get(CONF_SUBSCRIBE_LOGS) and self._log_level != (
new_log_level := self._async_get_equivalent_log_level()
):
self._async_subscribe_logs(new_log_level)

async def async_start(self) -> None:
"""Start the esphome connection manager."""
@@ -627,22 +545,11 @@
)
_setup_services(hass, entry_data, services)

if (device_info := entry_data.device_info) is not None:
if device_info.name:
reconnect_logic.name = device_info.name
if (
bluetooth_mac_address := device_info.bluetooth_mac_address
) and entry.data.get(CONF_BLUETOOTH_MAC_ADDRESS) != bluetooth_mac_address:
hass.config_entries.async_update_entry(
entry,
data={
**entry.data,
CONF_BLUETOOTH_MAC_ADDRESS: bluetooth_mac_address,
},
)
if entry_data.device_info is not None and entry_data.device_info.name:
reconnect_logic.name = entry_data.device_info.name
if entry.unique_id is None:
hass.config_entries.async_update_entry(
entry, unique_id=format_mac(device_info.mac_address)
entry, unique_id=format_mac(entry_data.device_info.mac_address)
)

await reconnect_logic.start()

@@ -16,9 +16,9 @@
"loggers": ["aioesphomeapi", "noiseprotocol", "bleak_esphome"],
"mqtt": ["esphome/discover/#"],
"requirements": [
"aioesphomeapi==29.3.2",
"aioesphomeapi==29.1.1",
"esphome-dashboard-api==1.2.3",
"bleak-esphome==2.9.0"
"bleak-esphome==2.7.1"
],
"zeroconf": ["_esphomelib._tcp.local."]
}

@@ -54,8 +54,7 @@
"step": {
"init": {
"data": {
"allow_service_calls": "Allow the device to perform Home Assistant actions.",
"subscribe_logs": "Subscribe to logs from the device. When enabled, the device will send logs to Home Assistant and you can view them in the logs panel."
"allow_service_calls": "Allow the device to perform Home Assistant actions."
}
}
}

@@ -7,21 +7,21 @@ from collections.abc import Callable, Mapping
import logging
from typing import Any

from pyfibaro.fibaro_client import (
FibaroAuthenticationFailed,
FibaroClient,
FibaroConnectFailed,
)
from pyfibaro.fibaro_data_helper import read_rooms
from pyfibaro.fibaro_client import FibaroClient
from pyfibaro.fibaro_device import DeviceModel
from pyfibaro.fibaro_info import InfoModel
from pyfibaro.fibaro_room import RoomModel
from pyfibaro.fibaro_scene import SceneModel
from pyfibaro.fibaro_state_resolver import FibaroEvent, FibaroStateResolver
from requests.exceptions import HTTPError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryNotReady,
HomeAssistantError,
)
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.device_registry import DeviceEntry, DeviceInfo
from homeassistant.util import slugify

@@ -74,31 +74,63 @@ FIBARO_TYPEMAP = {
class FibaroController:
"""Initiate Fibaro Controller Class."""

def __init__(
self, fibaro_client: FibaroClient, info: InfoModel, import_plugins: bool
) -> None:
def __init__(self, config: Mapping[str, Any]) -> None:
"""Initialize the Fibaro controller."""
self._client = fibaro_client
self._fibaro_info = info

# The FibaroClient uses the correct API version automatically
self._client = FibaroClient(config[CONF_URL])
self._client.set_authentication(config[CONF_USERNAME], config[CONF_PASSWORD])

# Whether to import devices from plugins
self._import_plugins = import_plugins
# Mapping roomId to room object
self._room_map = read_rooms(fibaro_client)
self._import_plugins = config[CONF_IMPORT_PLUGINS]
self._room_map: dict[int, RoomModel] # Mapping roomId to room object
self._device_map: dict[int, DeviceModel] # Mapping deviceId to device object
self.fibaro_devices: dict[Platform, list[DeviceModel]] = defaultdict(
list
) # List of devices by entity platform
# All scenes
self._scenes = self._client.read_scenes()
self._scenes: list[SceneModel] = []
self._callbacks: dict[int, list[Any]] = {} # Update value callbacks by deviceId
# Event callbacks by device id
self._event_callbacks: dict[int, list[Callable[[FibaroEvent], None]]] = {}
# Unique serial number of the hub
self.hub_serial = info.serial_number
self.hub_serial: str # Unique serial number of the hub
self.hub_name: str # The friendly name of the hub
self.hub_model: str
self.hub_software_version: str
self.hub_api_url: str = config[CONF_URL]
# Device infos by fibaro device id
self._device_infos: dict[int, DeviceInfo] = {}

def connect(self) -> None:
"""Start the communication with the Fibaro controller."""

# Return value doesn't need to be checked,
# it is only relevant when connecting without credentials
self._client.connect()
info = self._client.read_info()
self.hub_serial = info.serial_number
self.hub_name = info.hc_name
self.hub_model = info.platform
self.hub_software_version = info.current_version

self._room_map = {room.fibaro_id: room for room in self._client.read_rooms()}
self._read_devices()
self._scenes = self._client.read_scenes()

def connect_with_error_handling(self) -> None:
"""Translate connect errors to easily differentiate auth and connect failures.

When there is a better error handling in the used library this can be improved.
"""
try:
self.connect()
except HTTPError as http_ex:
if http_ex.response.status_code == 403:
raise FibaroAuthFailed from http_ex

raise FibaroConnectFailed from http_ex
except Exception as ex:
raise FibaroConnectFailed from ex

def enable_state_handler(self) -> None:
"""Start StateHandler thread for monitoring updates."""

@@ -270,20 +302,14 @@ class FibaroController:

def get_room_name(self, room_id: int) -> str | None:
"""Get the room name by room id."""
return self._room_map.get(room_id)
assert self._room_map
room = self._room_map.get(room_id)
return room.name if room else None

def read_scenes(self) -> list[SceneModel]:
"""Return list of scenes."""
return self._scenes

def read_fibaro_info(self) -> InfoModel:
"""Return the general info about the hub."""
return self._fibaro_info

def get_frontend_url(self) -> str:
"""Return the url to the Fibaro hub web UI."""
return self._client.frontend_url()

def _read_devices(self) -> None:
"""Read and process the device list."""
devices = self._client.read_devices()
@@ -293,17 +319,20 @@ class FibaroController:
for device in devices:
try:
device.fibaro_controller = self
room_name = self.get_room_name(device.room_id)
if not room_name:
if device.room_id == 0:
room_name = "Unknown"
else:
room_name = self._room_map[device.room_id].name
device.room_name = room_name
device.friendly_name = f"{room_name} {device.name}"
device.ha_id = (
f"{slugify(room_name)}_{slugify(device.name)}_{device.fibaro_id}"
)
if device.enabled and (not device.is_plugin or self._import_plugins):
platform = self._map_device_to_platform(device)
if platform is None:
device.mapped_platform = self._map_device_to_platform(device)
else:
device.mapped_platform = None
if (platform := device.mapped_platform) is None:
continue
device.unique_id_str = f"{slugify(self.hub_serial)}.{device.fibaro_id}"
self._create_device_info(device, devices)
@@ -346,17 +375,11 @@ class FibaroController:
pass


def connect_fibaro_client(data: Mapping[str, Any]) -> tuple[InfoModel, FibaroClient]:
"""Connect to the fibaro hub and read some basic data."""
client = FibaroClient(data[CONF_URL])
info = client.connect_with_credentials(data[CONF_USERNAME], data[CONF_PASSWORD])
return (info, client)


def init_controller(data: Mapping[str, Any]) -> FibaroController:
"""Connect to the fibaro hub and init the controller."""
info, client = connect_fibaro_client(data)
return FibaroController(client, info, data[CONF_IMPORT_PLUGINS])
"""Validate the user input allows us to connect to fibaro."""
controller = FibaroController(data)
controller.connect_with_error_handling()
return controller

async def async_setup_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bool:
@@ -370,24 +393,22 @@ async def async_setup_entry(hass: HomeAssistant, entry: FibaroConfigEntry) -> bo
raise ConfigEntryNotReady(
f"Could not connect to controller at {entry.data[CONF_URL]}"
) from connect_ex
except FibaroAuthenticationFailed as auth_ex:
except FibaroAuthFailed as auth_ex:
raise ConfigEntryAuthFailed from auth_ex

entry.runtime_data = controller

# register the hub device info separately as the hub has sometimes no entities
fibaro_info = controller.read_fibaro_info()
device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, controller.hub_serial)},
serial_number=controller.hub_serial,
manufacturer=fibaro_info.manufacturer_name,
name=fibaro_info.hc_name,
model=fibaro_info.model_name,
sw_version=fibaro_info.current_version,
configuration_url=controller.get_frontend_url(),
connections={(dr.CONNECTION_NETWORK_MAC, fibaro_info.mac_address)},
manufacturer="Fibaro",
name=controller.hub_name,
model=controller.hub_model,
sw_version=controller.hub_software_version,
configuration_url=controller.hub_api_url.removesuffix("/api/"),
)

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
@@ -422,3 +443,11 @@ async def async_remove_config_entry_device(
return False

return True


class FibaroConnectFailed(HomeAssistantError):
"""Error to indicate we cannot connect to fibaro home center."""


class FibaroAuthFailed(HomeAssistantError):
"""Error to indicate that authentication failed on fibaro home center."""

@@ -129,13 +129,13 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
def __init__(self, fibaro_device: DeviceModel) -> None:
"""Initialize the Fibaro device."""
super().__init__(fibaro_device)
self._temp_sensor_device: DeviceModel | None = None
self._target_temp_device: DeviceModel | None = None
self._op_mode_device: DeviceModel | None = None
self._fan_mode_device: DeviceModel | None = None
self._temp_sensor_device: FibaroEntity | None = None
self._target_temp_device: FibaroEntity | None = None
self._op_mode_device: FibaroEntity | None = None
self._fan_mode_device: FibaroEntity | None = None
self.entity_id = ENTITY_ID_FORMAT.format(self.ha_id)

siblings = self.controller.get_siblings(fibaro_device)
siblings = fibaro_device.fibaro_controller.get_siblings(fibaro_device)
_LOGGER.debug("%s siblings: %s", fibaro_device.ha_id, siblings)
tempunit = "C"
for device in siblings:
@@ -147,23 +147,23 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
and (device.value.has_value or device.has_heating_thermostat_setpoint)
and device.unit in ("C", "F")
):
self._temp_sensor_device = device
self._temp_sensor_device = FibaroEntity(device)
tempunit = device.unit

if any(
action for action in TARGET_TEMP_ACTIONS if action in device.actions
):
self._target_temp_device = device
self._target_temp_device = FibaroEntity(device)
self._attr_supported_features |= ClimateEntityFeature.TARGET_TEMPERATURE
if device.has_unit:
tempunit = device.unit

if any(action for action in OP_MODE_ACTIONS if action in device.actions):
self._op_mode_device = device
self._op_mode_device = FibaroEntity(device)
self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE

if "setFanMode" in device.actions:
self._fan_mode_device = device
self._fan_mode_device = FibaroEntity(device)
self._attr_supported_features |= ClimateEntityFeature.FAN_MODE

if tempunit == "F":
@@ -172,7 +172,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
self._attr_temperature_unit = UnitOfTemperature.CELSIUS

if self._fan_mode_device:
fan_modes = self._fan_mode_device.supported_modes
fan_modes = self._fan_mode_device.fibaro_device.supported_modes
self._attr_fan_modes = []
for mode in fan_modes:
if mode not in FANMODES:
@@ -184,7 +184,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if self._op_mode_device:
self._attr_preset_modes = []
self._attr_hvac_modes: list[HVACMode] = []
device = self._op_mode_device
device = self._op_mode_device.fibaro_device
if device.has_supported_thermostat_modes:
for mode in device.supported_thermostat_modes:
try:

@@ -222,15 +222,15 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
"- _fan_mode_device %s"
),
self.ha_id,
self._temp_sensor_device.fibaro_id if self._temp_sensor_device else "None",
self._target_temp_device.fibaro_id if self._target_temp_device else "None",
self._op_mode_device.fibaro_id if self._op_mode_device else "None",
self._fan_mode_device.fibaro_id if self._fan_mode_device else "None",
self._temp_sensor_device.ha_id if self._temp_sensor_device else "None",
self._target_temp_device.ha_id if self._target_temp_device else "None",
self._op_mode_device.ha_id if self._op_mode_device else "None",
self._fan_mode_device.ha_id if self._fan_mode_device else "None",
)
await super().async_added_to_hass()

# Register update callback for child devices
siblings = self.controller.get_siblings(self.fibaro_device)
siblings = self.fibaro_device.fibaro_controller.get_siblings(self.fibaro_device)
for device in siblings:
if device != self.fibaro_device:
self.controller.register(device.fibaro_id, self._update_callback)
@@ -240,14 +240,14 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
"""Return the fan setting."""
if not self._fan_mode_device:
return None
mode = self._fan_mode_device.mode
mode = self._fan_mode_device.fibaro_device.mode
return FANMODES[mode]

def set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
if not self._fan_mode_device:
return
self._fan_mode_device.execute_action("setFanMode", [HA_FANMODES[fan_mode]])
self._fan_mode_device.action("setFanMode", HA_FANMODES[fan_mode])

@property
def fibaro_op_mode(self) -> str | int:
@@ -255,7 +255,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if not self._op_mode_device:
return HA_OPMODES_HVAC[HVACMode.AUTO]

device = self._op_mode_device
device = self._op_mode_device.fibaro_device

if device.has_operating_mode:
return device.operating_mode

@@ -281,17 +281,17 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if not self._op_mode_device:
return

device = self._op_mode_device
if "setOperatingMode" in device.actions:
device.execute_action("setOperatingMode", [HA_OPMODES_HVAC[hvac_mode]])
elif "setThermostatMode" in device.actions:
if "setOperatingMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action("setOperatingMode", HA_OPMODES_HVAC[hvac_mode])
elif "setThermostatMode" in self._op_mode_device.fibaro_device.actions:
device = self._op_mode_device.fibaro_device
if device.has_supported_thermostat_modes:
for mode in device.supported_thermostat_modes:
if mode.lower() == hvac_mode:
device.execute_action("setThermostatMode", [mode])
self._op_mode_device.action("setThermostatMode", mode)
break
elif "setMode" in device.actions:
device.execute_action("setMode", [HA_OPMODES_HVAC[hvac_mode]])
elif "setMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action("setMode", HA_OPMODES_HVAC[hvac_mode])

@property
def hvac_action(self) -> HVACAction | None:
@@ -299,7 +299,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if not self._op_mode_device:
return None

device = self._op_mode_device
device = self._op_mode_device.fibaro_device
if device.has_thermostat_operating_state:
with suppress(ValueError):
return HVACAction(device.thermostat_operating_state.lower())
@@ -315,15 +315,15 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if not self._op_mode_device:
return None

if self._op_mode_device.has_thermostat_mode:
mode = self._op_mode_device.thermostat_mode
if self._op_mode_device.fibaro_device.has_thermostat_mode:
mode = self._op_mode_device.fibaro_device.thermostat_mode
if self.preset_modes is not None and mode in self.preset_modes:
return mode
return None
if self._op_mode_device.has_operating_mode:
mode = self._op_mode_device.operating_mode
if self._op_mode_device.fibaro_device.has_operating_mode:
mode = self._op_mode_device.fibaro_device.operating_mode
else:
mode = self._op_mode_device.mode
mode = self._op_mode_device.fibaro_device.mode

if mode not in OPMODES_PRESET:
return None

@@ -334,22 +334,20 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
if self._op_mode_device is None:
return

if "setThermostatMode" in self._op_mode_device.actions:
self._op_mode_device.execute_action("setThermostatMode", [preset_mode])
elif "setOperatingMode" in self._op_mode_device.actions:
self._op_mode_device.execute_action(
"setOperatingMode", [HA_OPMODES_PRESET[preset_mode]]
)
elif "setMode" in self._op_mode_device.actions:
self._op_mode_device.execute_action(
"setMode", [HA_OPMODES_PRESET[preset_mode]]
if "setThermostatMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action("setThermostatMode", preset_mode)
elif "setOperatingMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action(
"setOperatingMode", HA_OPMODES_PRESET[preset_mode]
)
elif "setMode" in self._op_mode_device.fibaro_device.actions:
self._op_mode_device.action("setMode", HA_OPMODES_PRESET[preset_mode])

@property
def current_temperature(self) -> float | None:
"""Return the current temperature."""
if self._temp_sensor_device:
device = self._temp_sensor_device
device = self._temp_sensor_device.fibaro_device
if device.has_heating_thermostat_setpoint:
return device.heating_thermostat_setpoint
return device.value.float_value()
@@ -359,7 +357,7 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
def target_temperature(self) -> float | None:
"""Return the temperature we try to reach."""
if self._target_temp_device:
device = self._target_temp_device
device = self._target_temp_device.fibaro_device
if device.has_heating_thermostat_setpoint_future:
return device.heating_thermostat_setpoint_future
return device.target_level
@@ -370,11 +368,9 @@ class FibaroThermostat(FibaroEntity, ClimateEntity):
temperature = kwargs.get(ATTR_TEMPERATURE)
target = self._target_temp_device
if target is not None and temperature is not None:
if "setThermostatSetpoint" in target.actions:
target.execute_action(
"setThermostatSetpoint", [self.fibaro_op_mode, temperature]
)
elif "setHeatingThermostatSetpoint" in target.actions:
target.execute_action("setHeatingThermostatSetpoint", [temperature])
if "setThermostatSetpoint" in target.fibaro_device.actions:
target.action("setThermostatSetpoint", self.fibaro_op_mode, temperature)
elif "setHeatingThermostatSetpoint" in target.fibaro_device.actions:
target.action("setHeatingThermostatSetpoint", temperature)
else:
target.execute_action("setTargetLevel", [temperature])
target.action("setTargetLevel", temperature)

@@ -6,7 +6,6 @@ from collections.abc import Mapping
import logging
from typing import Any

from pyfibaro.fibaro_client import FibaroAuthenticationFailed, FibaroConnectFailed
from slugify import slugify
import voluptuous as vol

@@ -14,7 +13,7 @@ from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME
from homeassistant.core import HomeAssistant

from . import connect_fibaro_client
from . import FibaroAuthFailed, FibaroConnectFailed, init_controller
from .const import CONF_IMPORT_PLUGINS, DOMAIN

_LOGGER = logging.getLogger(__name__)
@@ -34,16 +33,16 @@ async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> dict[str

Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user.
"""
info, _ = await hass.async_add_executor_job(connect_fibaro_client, data)
controller = await hass.async_add_executor_job(init_controller, data)

_LOGGER.debug(
"Successfully connected to fibaro home center %s with name %s",
info.serial_number,
info.hc_name,
controller.hub_serial,
controller.hub_name,
)
return {
"serial_number": slugify(info.serial_number),
"name": info.hc_name,
"serial_number": slugify(controller.hub_serial),
"name": controller.hub_name,
}


@@ -76,7 +75,7 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN):
info = await _validate_input(self.hass, user_input)
except FibaroConnectFailed:
errors["base"] = "cannot_connect"
except FibaroAuthenticationFailed:
except FibaroAuthFailed:
errors["base"] = "invalid_auth"
else:
await self.async_set_unique_id(info["serial_number"])
@@ -107,7 +106,7 @@ class FibaroConfigFlow(ConfigFlow, domain=DOMAIN):
await _validate_input(self.hass, new_data)
except FibaroConnectFailed:
errors["base"] = "cannot_connect"
except FibaroAuthenticationFailed:
except FibaroAuthFailed:
errors["base"] = "invalid_auth"
else:
return self.async_update_reload_and_abort(

@@ -11,8 +11,6 @@ from pyfibaro.fibaro_device import DeviceModel
from homeassistant.const import ATTR_ARMED, ATTR_BATTERY_LEVEL
from homeassistant.helpers.entity import Entity

from . import FibaroController

_LOGGER = logging.getLogger(__name__)


@@ -24,7 +22,7 @@ class FibaroEntity(Entity):
def __init__(self, fibaro_device: DeviceModel) -> None:
"""Initialize the device."""
self.fibaro_device = fibaro_device
self.controller: FibaroController = fibaro_device.fibaro_controller
self.controller = fibaro_device.fibaro_controller
self.ha_id = fibaro_device.ha_id
self._attr_name = fibaro_device.friendly_name
self._attr_unique_id = fibaro_device.unique_id_str
@@ -56,6 +54,15 @@ class FibaroEntity(Entity):
return self.fibaro_device.value_2.int_value()
return None

def dont_know_message(self, cmd: str) -> None:
"""Make a warning in case we don't know how to perform an action."""
_LOGGER.warning(
"Not sure how to %s: %s (available actions: %s)",
cmd,
str(self.ha_id),
str(self.fibaro_device.actions),
)

def set_level(self, level: int) -> None:
"""Set the level of Fibaro device."""
self.action("setValue", level)
@@ -90,7 +97,11 @@ class FibaroEntity(Entity):

def action(self, cmd: str, *args: Any) -> None:
"""Perform an action on the Fibaro HC."""
self.fibaro_device.execute_action(cmd, args)
if cmd in self.fibaro_device.actions:
self.fibaro_device.execute_action(cmd, args)
_LOGGER.debug("-> %s.%s%s called", str(self.ha_id), str(cmd), str(args))
else:
self.dont_know_message(cmd)

@property
def current_binary_state(self) -> bool:

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_push",
"loggers": ["pyfibaro"],
"requirements": ["pyfibaro==0.8.2"]
"requirements": ["pyfibaro==0.8.0"]
}

|
||||
"""FrankEver virtual integration."""
|
@@ -1,6 +0,0 @@
|
||||
{
|
||||
"domain": "frankever",
|
||||
"name": "FrankEver",
|
||||
"integration_type": "virtual",
|
||||
"supported_by": "shelly"
|
||||
}
|
@@ -20,5 +20,5 @@
|
||||
"documentation": "https://www.home-assistant.io/integrations/frontend",
|
||||
"integration_type": "system",
|
||||
"quality_scale": "internal",
|
||||
"requirements": ["home-assistant-frontend==20250228.0"]
|
||||
"requirements": ["home-assistant-frontend==20250221.0"]
|
||||
}
|
||||
|
@@ -45,7 +45,7 @@
},
"mode": {
"name": "[%key:common::config_flow::data::mode%]",
"description": "The zone's operating mode."
"description": "One of: off, timer or footprint."
}
}
},

@@ -111,20 +111,9 @@ def _format_schema(schema: dict[str, Any]) -> Schema:
continue
if key == "any_of":
val = [_format_schema(subschema) for subschema in val]
elif key == "type":
if key == "type":
val = val.upper()
elif key == "format":
# Gemini API does not support all formats, see: https://ai.google.dev/api/caching#Schema
# formats that are not supported are ignored
if schema.get("type") == "string" and val not in ("enum", "date-time"):
continue
if schema.get("type") == "number" and val not in ("float", "double"):
continue
if schema.get("type") == "integer" and val not in ("int32", "int64"):
continue
if schema.get("type") not in ("string", "number", "integer"):
continue
elif key == "items":
if key == "items":
val = _format_schema(val)
elif key == "properties":
val = {k: _format_schema(v) for k, v in val.items()}

@@ -20,4 +20,3 @@ MAX_ERRORS = 2
TARGET_TEMPERATURE_STEP = 1

UPDATE_INTERVAL = 60
MAX_EXPECTED_RESPONSE_TIME_INTERVAL = UPDATE_INTERVAL * 2

@@ -2,7 +2,6 @@

from __future__ import annotations

import copy
from datetime import datetime, timedelta
import logging
from typing import Any
@@ -25,7 +24,6 @@ from .const import (
DISPATCH_DEVICE_DISCOVERED,
DOMAIN,
MAX_ERRORS,
MAX_EXPECTED_RESPONSE_TIME_INTERVAL,
UPDATE_INTERVAL,
)

@@ -50,6 +48,7 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
always_update=False,
)
self.device = device
self.device.add_handler(Response.DATA, self.device_state_updated)
self.device.add_handler(Response.RESULT, self.device_state_updated)

self._error_count: int = 0
@@ -89,9 +88,7 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
# raise update failed if time for more than MAX_ERRORS has passed since last update
now = utcnow()
elapsed_success = now - self._last_response_time
if self.update_interval and elapsed_success >= timedelta(
seconds=MAX_EXPECTED_RESPONSE_TIME_INTERVAL
):
if self.update_interval and elapsed_success >= self.update_interval:
if not self._last_error_time or (
(now - self.update_interval) >= self._last_error_time
):
@@ -99,19 +96,16 @@ class DeviceDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
self._error_count += 1

_LOGGER.warning(
"Device %s took an unusually long time to respond, %s seconds",
"Device %s is unresponsive for %s seconds",
self.name,
elapsed_success,
)
else:
self._error_count = 0
if self.last_update_success and self._error_count >= MAX_ERRORS:
raise UpdateFailed(
f"Device {self.name} is unresponsive for too long and now unavailable"
)

self._last_response_time = utcnow()
return copy.deepcopy(self.device.raw_properties)
return self.device.raw_properties

async def push_state_update(self):
"""Send state updates to the physical device."""

@@ -26,7 +26,6 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
api_key="todayEnergy",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
),
GrowattSensorEntityDescription(
key="total_output_power",
@@ -34,7 +33,6 @@ TOTAL_SENSOR_TYPES: tuple[GrowattSensorEntityDescription, ...] = (
api_key="invTodayPpv",
native_unit_of_measurement=UnitOfPower.WATT,
device_class=SensorDeviceClass.POWER,
state_class=SensorStateClass.MEASUREMENT,
),
GrowattSensorEntityDescription(
key="total_energy_output",

@@ -40,10 +40,6 @@ ATTR_ALIAS = "alias"
ATTR_PRIORITY = "priority"
ATTR_COST = "cost"
ATTR_NOTES = "notes"
ATTR_UP_DOWN = "up_down"
ATTR_FREQUENCY = "frequency"
ATTR_COUNTER_UP = "counter_up"
ATTR_COUNTER_DOWN = "counter_down"

SERVICE_CAST_SKILL = "cast_skill"
SERVICE_START_QUEST = "start_quest"
@@ -60,8 +56,6 @@ SERVICE_SCORE_REWARD = "score_reward"
SERVICE_TRANSFORMATION = "transformation"

SERVICE_UPDATE_REWARD = "update_reward"
SERVICE_CREATE_REWARD = "create_reward"
SERVICE_UPDATE_HABIT = "update_habit"

DEVELOPER_ID = "4c4ca53f-c059-4ffa-966e-9d29dd405daf"
X_CLIENT = f"{DEVELOPER_ID} - {APPLICATION_NAME} {__version__}"
@@ -224,19 +224,6 @@
"tag_options": "mdi:tag",
"developer_options": "mdi:test-tube"
}
},
"create_reward": {
"service": "mdi:treasure-chest-outline",
"sections": {
"developer_options": "mdi:test-tube"
}
},
"update_habit": {
"service": "mdi:contrast-box",
"sections": {
"tag_options": "mdi:tag",
"developer_options": "mdi:test-tube"
}
}
}
}
@@ -10,7 +10,6 @@ from uuid import UUID
from aiohttp import ClientError
from habiticalib import (
Direction,
Frequency,
HabiticaException,
NotAuthorizedError,
NotFoundError,
@@ -42,11 +41,8 @@ from .const import (
ATTR_ARGS,
ATTR_CONFIG_ENTRY,
ATTR_COST,
ATTR_COUNTER_DOWN,
ATTR_COUNTER_UP,
ATTR_DATA,
ATTR_DIRECTION,
ATTR_FREQUENCY,
ATTR_ITEM,
ATTR_KEYWORD,
ATTR_NOTES,
@@ -58,7 +54,6 @@ from .const import (
ATTR_TARGET,
ATTR_TASK,
ATTR_TYPE,
ATTR_UP_DOWN,
DOMAIN,
EVENT_API_CALL_SUCCESS,
SERVICE_ABORT_QUEST,
@@ -66,7 +61,6 @@ from .const import (
SERVICE_API_CALL,
SERVICE_CANCEL_QUEST,
SERVICE_CAST_SKILL,
SERVICE_CREATE_REWARD,
SERVICE_GET_TASKS,
SERVICE_LEAVE_QUEST,
SERVICE_REJECT_QUEST,
@@ -74,7 +68,6 @@ from .const import (
SERVICE_SCORE_REWARD,
SERVICE_START_QUEST,
SERVICE_TRANSFORMATION,
SERVICE_UPDATE_HABIT,
SERVICE_UPDATE_REWARD,
)
from .coordinator import HabiticaConfigEntry
@@ -119,36 +112,18 @@ SERVICE_TRANSFORMATION_SCHEMA = vol.Schema(
}
)

BASE_TASK_SCHEMA = vol.Schema(
SERVICE_UPDATE_TASK_SCHEMA = vol.Schema(
{
vol.Required(ATTR_CONFIG_ENTRY): ConfigEntrySelector(),
vol.Required(ATTR_TASK): cv.string,
vol.Optional(ATTR_RENAME): cv.string,
vol.Optional(ATTR_NOTES): cv.string,
vol.Optional(ATTR_TAG): vol.All(cv.ensure_list, [str]),
vol.Optional(ATTR_REMOVE_TAG): vol.All(cv.ensure_list, [str]),
vol.Optional(ATTR_ALIAS): vol.All(
cv.string, cv.matches_regex("^[a-zA-Z0-9-_]*$")
),
vol.Optional(ATTR_COST): vol.All(vol.Coerce(float), vol.Range(0)),
vol.Optional(ATTR_PRIORITY): vol.All(
vol.Upper, vol.In(TaskPriority._member_names_)
),
vol.Optional(ATTR_UP_DOWN): vol.All(cv.ensure_list, [str]),
vol.Optional(ATTR_COUNTER_UP): vol.All(int, vol.Range(0)),
vol.Optional(ATTR_COUNTER_DOWN): vol.All(int, vol.Range(0)),
vol.Optional(ATTR_FREQUENCY): vol.Coerce(Frequency),
}
)

SERVICE_UPDATE_TASK_SCHEMA = BASE_TASK_SCHEMA.extend(
{
vol.Required(ATTR_TASK): cv.string,
vol.Optional(ATTR_REMOVE_TAG): vol.All(cv.ensure_list, [str]),
}
)

SERVICE_CREATE_TASK_SCHEMA = BASE_TASK_SCHEMA.extend(
{
vol.Required(ATTR_NAME): cv.string,
vol.Optional(ATTR_COST): vol.Coerce(float),
}
)

@@ -186,12 +161,6 @@ ITEMID_MAP = {
"shiny_seed": Skill.SHINY_SEED,
}

SERVICE_TASK_TYPE_MAP = {
SERVICE_UPDATE_REWARD: TaskType.REWARD,
SERVICE_CREATE_REWARD: TaskType.REWARD,
SERVICE_UPDATE_HABIT: TaskType.HABIT,
}


def get_config_entry(hass: HomeAssistant, entry_id: str) -> HabiticaConfigEntry:
"""Return config entry or raise if not found or not loaded."""
@@ -570,36 +539,33 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901

return result

async def create_or_update_task(call: ServiceCall) -> ServiceResponse: # noqa: C901
"""Create or update task action."""
async def update_task(call: ServiceCall) -> ServiceResponse:
"""Update task action."""
entry = get_config_entry(hass, call.data[ATTR_CONFIG_ENTRY])
coordinator = entry.runtime_data
await coordinator.async_refresh()
is_update = call.service in (SERVICE_UPDATE_REWARD, SERVICE_UPDATE_HABIT)
current_task = None

if is_update:
try:
current_task = next(
task
for task in coordinator.data.tasks
if call.data[ATTR_TASK] in (str(task.id), task.alias, task.text)
and task.Type is SERVICE_TASK_TYPE_MAP[call.service]
)
except StopIteration as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="task_not_found",
translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"},
) from e
try:
current_task = next(
task
for task in coordinator.data.tasks
if call.data[ATTR_TASK] in (str(task.id), task.alias, task.text)
and task.Type is TaskType.REWARD
)
except StopIteration as e:
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="task_not_found",
translation_placeholders={"task": f"'{call.data[ATTR_TASK]}'"},
) from e

task_id = current_task.id
if TYPE_CHECKING:
assert task_id
data = Task()

if not is_update:
data["type"] = TaskType.REWARD

if (text := call.data.get(ATTR_RENAME)) or (text := call.data.get(ATTR_NAME)):
data["text"] = text
if rename := call.data.get(ATTR_RENAME):
data["text"] = rename

if (notes := call.data.get(ATTR_NOTES)) is not None:
data["notes"] = notes
@@ -608,7 +574,7 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
remove_tags = cast(list[str], call.data.get(ATTR_REMOVE_TAG))

if tags or remove_tags:
update_tags = set(current_task.tags) if current_task else set()
update_tags = set(current_task.tags)
user_tags = {
tag.name.lower(): tag.id
for tag in coordinator.data.user.tags
@@ -667,30 +633,8 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
if (cost := call.data.get(ATTR_COST)) is not None:
data["value"] = cost

if priority := call.data.get(ATTR_PRIORITY):
data["priority"] = TaskPriority[priority]

if frequency := call.data.get(ATTR_FREQUENCY):
data["frequency"] = frequency

if up_down := call.data.get(ATTR_UP_DOWN):
data["up"] = "up" in up_down
data["down"] = "down" in up_down

if counter_up := call.data.get(ATTR_COUNTER_UP):
data["counterUp"] = counter_up

if counter_down := call.data.get(ATTR_COUNTER_DOWN):
data["counterDown"] = counter_down

try:
if is_update:
if TYPE_CHECKING:
assert current_task
assert current_task.id
response = await coordinator.habitica.update_task(current_task.id, data)
else:
response = await coordinator.habitica.create_task(data)
response = await coordinator.habitica.update_task(task_id, data)
except TooManyRequestsError as e:
raise HomeAssistantError(
translation_domain=DOMAIN,
@@ -715,24 +659,10 @@ def async_setup_services(hass: HomeAssistant) -> None: # noqa: C901
hass.services.async_register(
DOMAIN,
SERVICE_UPDATE_REWARD,
create_or_update_task,
update_task,
schema=SERVICE_UPDATE_TASK_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_UPDATE_HABIT,
create_or_update_task,
schema=SERVICE_UPDATE_TASK_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_CREATE_REWARD,
create_or_update_task,
schema=SERVICE_CREATE_TASK_SCHEMA,
supports_response=SupportsResponse.ONLY,
)
hass.services.async_register(
DOMAIN,
SERVICE_API_CALL,
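
The schema hunks above replace a single SERVICE_UPDATE_TASK_SCHEMA with a shared BASE_TASK_SCHEMA that the update and create variants extend. A self-contained sketch of that voluptuous pattern (field names are shortened stand-ins, not the habitica constants):

# Hedged sketch of vol.Schema(...).extend(...) as used above.
import voluptuous as vol

BASE = vol.Schema(
    {
        vol.Optional("notes"): str,
        vol.Optional("cost"): vol.All(vol.Coerce(float), vol.Range(0)),
    }
)

# extend() returns a new schema; each variant adds its own required keys.
UPDATE = BASE.extend({vol.Required("task"): str})
CREATE = BASE.extend({vol.Required("name"): str})

UPDATE({"task": "t1", "cost": "2.5"})  # "2.5" is coerced to 2.5
CREATE({"name": "coffee"})
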
@@ -144,26 +144,26 @@ update_reward:
fields:
config_entry: *config_entry
task: *task
rename: &rename
rename:
selector:
text:
notes: &notes
notes:
required: false
selector:
text:
multiline: true
cost:
required: false
selector: &cost_selector
selector:
number:
min: 0
step: 0.01
unit_of_measurement: "🪙"
mode: box
tag_options: &tag_options
tag_options:
collapsed: true
fields:
tag: &tag
tag:
required: false
selector:
text:
@@ -173,82 +173,10 @@ update_reward:
selector:
text:
multiple: true
developer_options: &developer_options
collapsed: true
fields:
alias: &alias
required: false
selector:
text:
create_reward:
fields:
config_entry: *config_entry
name:
required: true
selector:
text:
notes: *notes
cost:
required: true
selector: *cost_selector
tag: *tag
developer_options: *developer_options
update_habit:
fields:
config_entry: *config_entry
task: *task
rename: *rename
notes: *notes
up_down:
required: false
selector:
select:
options:
- value: up
label: "➕"
- value: down
label: "➖"
multiple: true
mode: list
priority:
required: false
selector:
select:
options:
- "trivial"
- "easy"
- "medium"
- "hard"
mode: dropdown
translation_key: "priority"
frequency:
required: false
selector:
select:
options:
- "daily"
- "weekly"
- "monthly"
translation_key: "frequency"
mode: dropdown
tag_options: *tag_options
developer_options:
collapsed: true
fields:
counter_up:
alias:
required: false
selector:
number:
min: 0
step: 1
unit_of_measurement: "➕"
mode: box
counter_down:
required: false
selector:
number:
min: 0
step: 1
unit_of_measurement: "➖"
mode: box
alias: *alias
text:
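
The services.yaml hunks above introduce YAML anchors (&rename, &notes, &cost_selector, ...) so create_reward and update_habit can alias fields already defined under update_reward. A minimal check of how anchors deduplicate once parsed (assumes PyYAML is installed; the keys are trimmed from the diff):

# Hedged sketch: aliases resolve to the same parsed content as their anchor.
import yaml

doc = """
update_reward:
  fields:
    rename: &rename
      selector:
        text:
create_reward:
  fields:
    rename: *rename
"""
data = yaml.safe_load(doc)
assert (
    data["update_reward"]["fields"]["rename"]
    == data["create_reward"]["fields"]["rename"]
)
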
@@ -23,9 +23,7 @@
"developer_options_name": "Advanced settings",
"developer_options_description": "Additional features available in developer mode.",
"tag_options_name": "Tags",
"tag_options_description": "Add or remove tags from a task.",
"name_description": "The title for the Habitica task.",
"cost_name": "Cost"
"tag_options_description": "Add or remove tags from a task."
},
"config": {
"abort": {
@@ -709,7 +707,7 @@
"description": "[%key:component::habitica::common::alias_description%]"
},
"cost": {
"name": "[%key:component::habitica::common::cost_name%]",
"name": "Cost",
"description": "Update the cost of a reward."
}
},
@@ -723,106 +721,6 @@
"description": "[%key:component::habitica::common::developer_options_description%]"
}
}
},
"create_reward": {
"name": "Create reward",
"description": "Adds a new custom reward.",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
"description": "Select the Habitica account to create a reward."
},
"name": {
"name": "[%key:component::habitica::common::task_name%]",
"description": "[%key:component::habitica::common::name_description%]"
},
"notes": {
"name": "[%key:component::habitica::common::notes_name%]",
"description": "[%key:component::habitica::common::notes_description%]"
},
"tag": {
"name": "[%key:component::habitica::common::tag_name%]",
"description": "[%key:component::habitica::common::tag_description%]"
},
"alias": {
"name": "[%key:component::habitica::common::alias_name%]",
"description": "[%key:component::habitica::common::alias_description%]"
},
"cost": {
"name": "[%key:component::habitica::common::cost_name%]",
"description": "The cost of the reward."
}
},
"sections": {
"developer_options": {
"name": "[%key:component::habitica::common::developer_options_name%]",
"description": "[%key:component::habitica::common::developer_options_description%]"
}
}
},
"update_habit": {
"name": "Update a habit",
"description": "Updates a specific habit for the selected Habitica character",
"fields": {
"config_entry": {
"name": "[%key:component::habitica::common::config_entry_name%]",
"description": "Select the Habitica account to update a habit."
},
"task": {
"name": "[%key:component::habitica::common::task_name%]",
"description": "[%key:component::habitica::common::task_description%]"
},
"rename": {
"name": "[%key:component::habitica::common::rename_name%]",
"description": "[%key:component::habitica::common::rename_description%]"
},
"notes": {
"name": "[%key:component::habitica::common::notes_name%]",
"description": "[%key:component::habitica::common::notes_description%]"
},
"tag": {
"name": "[%key:component::habitica::common::tag_name%]",
"description": "[%key:component::habitica::common::tag_description%]"
},
"remove_tag": {
"name": "[%key:component::habitica::common::remove_tag_name%]",
"description": "[%key:component::habitica::common::remove_tag_description%]"
},
"alias": {
"name": "[%key:component::habitica::common::alias_name%]",
"description": "[%key:component::habitica::common::alias_description%]"
},
"priority": {
"name": "Difficulty",
"description": "Update the difficulty of a task."
},
"frequency": {
"name": "Counter reset",
"description": "Update when a habit's counter resets: daily resets at the start of a new day, weekly after Sunday night, and monthly at the beginning of a new month."
},
"up_down": {
"name": "Rewards or losses",
"description": "Update if the habit is good and rewarding (positive), bad and penalizing (negative), or both."
},
"counter_up": {
"name": "Adjust positive counter",
"description": "Update the up counter of a positive habit."
},
"counter_down": {
"name": "Adjust negative counter",
"description": "Update the down counter of a negative habit."
}
},
"sections": {
"tag_options": {
"name": "[%key:component::habitica::common::tag_options_name%]",
"description": "[%key:component::habitica::common::tag_options_description%]"
},
"developer_options": {
"name": "[%key:component::habitica::common::developer_options_name%]",
"description": "[%key:component::habitica::common::developer_options_description%]"
}
}
}
},
"selector": {
@@ -857,14 +755,6 @@
"medium": "Medium",
"hard": "Hard"
}
},
"frequency": {
"options": {
"daily": "Daily",
"weekly": "Weekly",
"monthly": "Monthly",
"yearly": "Yearly"
}
}
}
}
@@ -72,27 +72,22 @@ def _handle_paired_or_connected_appliance(
for entity in get_option_entities_for_appliance(entry, appliance)
if entity.unique_id not in known_entity_unique_ids
)
for event_key in (
EventKey.BSH_COMMON_ROOT_ACTIVE_PROGRAM,
EventKey.BSH_COMMON_ROOT_SELECTED_PROGRAM,
):
changed_options_listener_remove_callback = (
entry.runtime_data.async_add_listener(
partial(
_create_option_entities,
entry,
appliance,
known_entity_unique_ids,
get_option_entities_for_appliance,
async_add_entities,
),
(appliance.info.ha_id, event_key),
)
)
entry.async_on_unload(changed_options_listener_remove_callback)
changed_options_listener_remove_callbacks[appliance.info.ha_id].append(
changed_options_listener_remove_callback
changed_options_listener_remove_callback = (
entry.runtime_data.async_add_listener(
partial(
_create_option_entities,
entry,
appliance,
known_entity_unique_ids,
get_option_entities_for_appliance,
async_add_entities,
),
)
)
entry.async_on_unload(changed_options_listener_remove_callback)
changed_options_listener_remove_callbacks[appliance.info.ha_id].append(
changed_options_listener_remove_callback
)
known_entity_unique_ids.update(
{
cast(str, entity.unique_id): appliance.info.ha_id
@@ -4,8 +4,6 @@ from typing import cast

from aiohomeconnect.model import EventKey, OptionKey, ProgramKey, SettingKey, StatusKey

from homeassistant.const import UnitOfTemperature, UnitOfTime, UnitOfVolume

from .utils import bsh_key_to_translation_key

DOMAIN = "home_connect"
@@ -23,13 +21,6 @@ APPLIANCES_WITH_PROGRAMS = (
"WasherDryer",
)

UNIT_MAP = {
"seconds": UnitOfTime.SECONDS,
"ml": UnitOfVolume.MILLILITERS,
"°C": UnitOfTemperature.CELSIUS,
"°F": UnitOfTemperature.FAHRENHEIT,
}


BSH_POWER_ON = "BSH.Common.EnumType.PowerState.On"
BSH_POWER_OFF = "BSH.Common.EnumType.PowerState.Off"
@@ -440,27 +440,13 @@ class HomeConnectCoordinator(
self, ha_id: str, program_key: ProgramKey
) -> dict[OptionKey, ProgramDefinitionOption]:
"""Get options with constraints for appliance."""
if program_key is ProgramKey.UNKNOWN:
return {}
try:
return {
option.key: option
for option in (
await self.client.get_available_program(
ha_id, program_key=program_key
)
).options
or []
}
except HomeConnectError as error:
_LOGGER.debug(
"Error fetching options for %s: %s",
ha_id,
error
if isinstance(error, HomeConnectApiError)
else type(error).__name__,
)
return {}
return {
option.key: option
for option in (
await self.client.get_available_program(ha_id, program_key=program_key)
).options
or []
}

async def update_options(
self, ha_id: str, event_key: EventKey, program_key: ProgramKey
@@ -470,7 +456,8 @@ class HomeConnectCoordinator(
events = self.data[ha_id].events
options_to_notify = options.copy()
options.clear()
options.update(await self.get_options_definitions(ha_id, program_key))
if program_key is not ProgramKey.UNKNOWN:
options.update(await self.get_options_definitions(ha_id, program_key))

for option in options.values():
option_value = option.constraints.default if option.constraints else None
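
One side of the hunk above swallows HomeConnectError and returns an empty mapping so a single misbehaving appliance cannot break the whole refresh. A minimal sketch of that "log and degrade" shape (fetch() and ApiError are stand-ins for the aiohomeconnect client, not its real API):

# Hedged sketch of the defensive-fetch pattern seen above.
import logging

_LOGGER = logging.getLogger(__name__)

class ApiError(Exception):
    pass

async def get_definitions(fetch, key):
    """Return {} instead of raising, mirroring the ProgramKey.UNKNOWN guard."""
    if key is None:  # stands in for `program_key is ProgramKey.UNKNOWN`
        return {}
    try:
        options = await fetch(key)
    except ApiError as error:
        _LOGGER.debug("Error fetching options for %s: %s", key, error)
        return {}
    return {opt.key: opt for opt in options or []}
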
@@ -49,23 +49,6 @@
"default": "mdi:map-marker-remove-variant"
}
},
"button": {
"open_door": {
"default": "mdi:door-open"
},
"partly_open_door": {
"default": "mdi:door-open"
},
"pause_program": {
"default": "mdi:pause"
},
"resume_program": {
"default": "mdi:play"
},
"stop_program": {
"default": "mdi:stop"
}
},
"sensor": {
"operation_state": {
"default": "mdi:state-machine",
@@ -7,6 +7,6 @@
"documentation": "https://www.home-assistant.io/integrations/home_connect",
"iot_class": "cloud_push",
"loggers": ["aiohomeconnect"],
"requirements": ["aiohomeconnect==0.15.1"],
"requirements": ["aiohomeconnect==0.15.0"],
"single_config_entry": true
}
@@ -11,6 +11,7 @@ from homeassistant.components.number import (
NumberEntity,
NumberEntityDescription,
)
from homeassistant.const import UnitOfTemperature, UnitOfTime, UnitOfVolume
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -22,7 +23,6 @@ from .const import (
SVE_TRANSLATION_PLACEHOLDER_ENTITY_ID,
SVE_TRANSLATION_PLACEHOLDER_KEY,
SVE_TRANSLATION_PLACEHOLDER_VALUE,
UNIT_MAP,
)
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity, HomeConnectOptionEntity
@@ -32,6 +32,13 @@ _LOGGER = logging.getLogger(__name__)

PARALLEL_UPDATES = 1

UNIT_MAP = {
"seconds": UnitOfTime.SECONDS,
"ml": UnitOfVolume.MILLILITERS,
"°C": UnitOfTemperature.CELSIUS,
"°F": UnitOfTemperature.FAHRENHEIT,
}

NUMBERS = (
NumberEntityDescription(
key=SettingKey.REFRIGERATION_FRIDGE_FREEZER_SETPOINT_TEMPERATURE_REFRIGERATOR,
@@ -1,12 +1,10 @@
"""Provides a sensor for Home Connect."""

import contextlib
from dataclasses import dataclass
from datetime import timedelta
from typing import cast

from aiohomeconnect.model import EventKey, StatusKey
from aiohomeconnect.model.error import HomeConnectError

from homeassistant.components.sensor import (
SensorDeviceClass,
@@ -25,7 +23,6 @@ from .const import (
BSH_OPERATION_STATE_FINISHED,
BSH_OPERATION_STATE_PAUSE,
BSH_OPERATION_STATE_RUN,
UNIT_MAP,
)
from .coordinator import HomeConnectApplianceData, HomeConnectConfigEntry
from .entity import HomeConnectEntity
@@ -43,7 +40,6 @@ class HomeConnectSensorEntityDescription(

default_value: str | None = None
appliance_types: tuple[str, ...] | None = None
fetch_unit: bool = False


BSH_PROGRAM_SENSORS = (
@@ -187,8 +183,7 @@ SENSORS = (
key=StatusKey.COOKING_OVEN_CURRENT_CAVITY_TEMPERATURE,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
translation_key="oven_current_cavity_temperature",
fetch_unit=True,
translation_key="current_cavity_temperature",
),
)

@@ -323,29 +318,6 @@ class HomeConnectSensor(HomeConnectEntity, SensorEntity):
case _:
self._attr_native_value = status

async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()
if self.entity_description.fetch_unit:
data = self.appliance.status[cast(StatusKey, self.bsh_key)]
if data.unit:
self._attr_native_unit_of_measurement = UNIT_MAP.get(
data.unit, data.unit
)
else:
await self.fetch_unit()

async def fetch_unit(self) -> None:
"""Fetch the unit of measurement."""
with contextlib.suppress(HomeConnectError):
data = await self.coordinator.client.get_status_value(
self.appliance.info.ha_id, status_key=cast(StatusKey, self.bsh_key)
)
if data.unit:
self._attr_native_unit_of_measurement = UNIT_MAP.get(
data.unit, data.unit
)


class HomeConnectProgramSensor(HomeConnectSensor):
"""Sensor class for Home Connect sensors that reports information related to the running program."""
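
The fetch_unit hunks above translate API unit strings to Home Assistant unit constants with a pass-through fallback. The idiom in isolation (mapping values below are plain-string stand-ins for the HA constants):

# UNIT_MAP.get(unit, unit): translate known units, pass unknown ones through.
UNIT_MAP = {"seconds": "s", "ml": "mL", "°C": "°C", "°F": "°F"}

def native_unit(api_unit: str | None) -> str | None:
    if api_unit is None:
        return None
    return UNIT_MAP.get(api_unit, api_unit)  # unknown units kept verbatim

assert native_unit("seconds") == "s"
assert native_unit("kWh") == "kWh"  # unmapped, passed through unchanged
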
@@ -354,7 +354,7 @@
"options": {
"consumer_products_coffee_maker_enum_type_flow_rate_normal": "Normal",
"consumer_products_coffee_maker_enum_type_flow_rate_intense": "Intense",
"consumer_products_coffee_maker_enum_type_flow_rate_intense_plus": "Intense +"
"consumer_products_coffee_maker_enum_type_flow_rate_intense_plus": "Intense plus"
}
},
"coffee_milk_ratio": {
@@ -410,7 +410,7 @@
"laundry_care_dryer_enum_type_drying_target_iron_dry": "Iron dry",
"laundry_care_dryer_enum_type_drying_target_gentle_dry": "Gentle dry",
"laundry_care_dryer_enum_type_drying_target_cupboard_dry": "Cupboard dry",
"laundry_care_dryer_enum_type_drying_target_cupboard_dry_plus": "Cupboard dry +",
"laundry_care_dryer_enum_type_drying_target_cupboard_dry_plus": "Cupboard dry plus",
"laundry_care_dryer_enum_type_drying_target_extra_dry": "Extra dry"
}
},
@@ -592,7 +592,7 @@
"description": "Defines if the program sequence is optimized with a special drying cycle to ensure more shine on glasses and plastic items."
},
"dishcare_dishwasher_option_vario_speed_plus": {
"name": "Vario speed +",
"name": "Vario speed plus",
"description": "Defines if the program run time is reduced by up to 66% with the usual optimum cleaning and drying."
},
"dishcare_dishwasher_option_silence_on_demand": {
@@ -608,7 +608,7 @@
"description": "Defines if improved drying for glasses and plasticware is enabled."
},
"dishcare_dishwasher_option_hygiene_plus": {
"name": "Hygiene +",
"name": "Hygiene plus",
"description": "Defines if the cleaning is done with increased temperature. This ensures maximum hygienic cleanliness for regular use."
},
"dishcare_dishwasher_option_eco_dry": {
@@ -1462,7 +1462,7 @@
"inactive": "Inactive",
"ready": "Ready",
"delayedstart": "Delayed start",
"run": "Running",
"run": "Run",
"pause": "[%key:common::state::paused%]",
"actionrequired": "Action required",
"finished": "Finished",
@@ -1529,8 +1529,8 @@
"map3": "Map 3"
}
},
"oven_current_cavity_temperature": {
"name": "Current oven cavity temperature"
"current_cavity_temperature": {
"name": "Current cavity temperature"
},
"freezer_door_alarm": {
"name": "Freezer door alarm",
@@ -437,21 +437,18 @@ def ws_expose_entity(
def ws_list_exposed_entities(
hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict[str, Any]
) -> None:
"""List entities which are exposed to assistants."""
"""Expose an entity to an assistant."""
result: dict[str, Any] = {}

exposed_entities = hass.data[DATA_EXPOSED_ENTITIES]
entity_registry = er.async_get(hass)
for entity_id in chain(exposed_entities.entities, entity_registry.entities):
exposed_to = {}
result[entity_id] = {}
entity_settings = async_get_entity_settings(hass, entity_id)
for assistant, settings in entity_settings.items():
if "should_expose" not in settings or not settings["should_expose"]:
if "should_expose" not in settings:
continue
exposed_to[assistant] = True
if not exposed_to:
continue
result[entity_id] = exposed_to
result[entity_id][assistant] = settings["should_expose"]
connection.send_result(msg["id"], {"exposed_entities": result})

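
One version above reports only truthy should_expose flags and omits entities with none, while the other includes every entity and echoes the raw flag. The difference in plain dict-filtering terms:

# Contrast of the two response shapes above, using plain dicts as settings.
settings_by_entity = {
    "light.kitchen": {"conversation": {"should_expose": True}},
    "light.attic": {"conversation": {"should_expose": False}},
}

# Variant A: only entities with at least one truthy flag appear at all.
variant_a = {}
for entity_id, per_assistant in settings_by_entity.items():
    exposed_to = {
        assistant: True
        for assistant, s in per_assistant.items()
        if s.get("should_expose")
    }
    if exposed_to:
        variant_a[entity_id] = exposed_to

# Variant B: every entity appears, carrying the raw flag values.
variant_b = {
    entity_id: {a: s["should_expose"] for a, s in per.items() if "should_expose" in s}
    for entity_id, per in settings_by_entity.items()
}

assert "light.attic" not in variant_a
assert variant_b["light.attic"] == {"conversation": False}
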
@@ -12,7 +12,7 @@
},
"imperial_unit_system": {
"title": "The imperial unit system is deprecated",
"description": "The imperial unit system is deprecated and your system is currently using US customary. Please update your configuration to use the US customary unit system and reload the Core configuration to fix this issue."
"description": "The imperial unit system is deprecated and your system is currently using us customary. Please update your configuration to use the us customary unit system and reload the core configuration to fix this issue."
},
"deprecated_yaml": {
"title": "The {integration_title} YAML configuration is being removed",
@@ -111,8 +111,8 @@
"description": "Checks the Home Assistant YAML-configuration files for errors. Errors will be shown in the Home Assistant logs."
},
"reload_core_config": {
"name": "Reload Core configuration",
"description": "Reloads the Core configuration from the YAML-configuration."
"name": "Reload core configuration",
"description": "Reloads the core configuration from the YAML-configuration."
},
"restart": {
"name": "[%key:common::action::restart%]",
@@ -160,7 +160,7 @@
},
"update_entity": {
"name": "Update entity",
"description": "Forces one or more entities to update their data.",
"description": "Forces one or more entities to update its data.",
"fields": {
"entity_id": {
"name": "Entities to update",
@@ -9,7 +9,7 @@
}
},
"switch": {
"watchdog": {
"watchdog_on_off": {
"default": "mdi:dog"
},
"manual_operation": {
@@ -154,6 +154,7 @@ class HKDevice:
self._pending_subscribes: set[tuple[int, int]] = set()
self._subscribe_timer: CALLBACK_TYPE | None = None
self._load_platforms_lock = asyncio.Lock()
self._full_update_requested: bool = False

@property
def entity_map(self) -> Accessories:
@@ -840,11 +841,48 @@ class HKDevice:

async def async_request_update(self, now: datetime | None = None) -> None:
"""Request an debounced update from the accessory."""
self._full_update_requested = True
await self._debounced_update.async_call()

async def async_update(self, now: datetime | None = None) -> None:
"""Poll state of all entities attached to this bridge/accessory."""
to_poll = self.pollable_characteristics
accessories = self.entity_map.accessories

if (
not self._full_update_requested
and len(accessories) == 1
and self.available
and not (to_poll - self.watchable_characteristics)
and self.pairing.is_available
and await self.pairing.controller.async_reachable(
self.unique_id, timeout=5.0
)
):
# If its a single accessory and all chars are watchable,
# only poll the firmware version to keep the connection alive
# https://github.com/home-assistant/core/issues/123412
#
# Firmware revision is used here since iOS does this to keep camera
# connections alive, and the goal is to not regress
# https://github.com/home-assistant/core/issues/116143
# by polling characteristics that are not normally polled frequently
# and may not be tested by the device vendor.
#
_LOGGER.debug(
"Accessory is reachable, limiting poll to firmware version: %s",
self.unique_id,
)
first_accessory = accessories[0]
accessory_info = first_accessory.services.first(
service_type=ServicesTypes.ACCESSORY_INFORMATION
)
assert accessory_info is not None
firmware_iid = accessory_info[CharacteristicsTypes.FIRMWARE_REVISION].iid
to_poll = {(first_accessory.aid, firmware_iid)}

self._full_update_requested = False

if not to_poll:
self.async_update_available_state()
_LOGGER.debug(
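
The async_update hunk above narrows the poll set to a single keep-alive characteristic unless a full update was explicitly requested. A hedged sketch of just that decision, with plain sets standing in for HomeKit (aid, iid) characteristic pairs:

# Sketch of the "narrow the poll set unless a full update was requested" logic.
def choose_poll_set(
    pollable: set[tuple[int, int]],
    watchable: set[tuple[int, int]],
    keepalive: tuple[int, int],
    full_update_requested: bool,
    single_accessory_reachable: bool,
) -> set[tuple[int, int]]:
    if (
        not full_update_requested
        and single_accessory_reachable
        and not (pollable - watchable)  # every polled char is already pushed
    ):
        return {keepalive}  # one cheap characteristic keeps the link warm
    return pollable
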
@@ -14,6 +14,6 @@
"documentation": "https://www.home-assistant.io/integrations/homekit_controller",
"iot_class": "local_push",
"loggers": ["aiohomekit", "commentjson"],
"requirements": ["aiohomekit==3.2.8"],
"requirements": ["aiohomekit==3.2.7"],
"zeroconf": ["_hap._tcp.local.", "_hap._udp.local."]
}
@@ -94,12 +94,7 @@ async def async_setup_devices(bridge: HueBridge):
add_device(hue_resource)

# create/update all current devices found in controllers
# sort the devices to ensure bridges are added first
hue_devices = list(dev_controller)
hue_devices.sort(
key=lambda dev: dev.metadata.archetype != DeviceArchetypes.BRIDGE_V2
)
known_devices = [add_device(hue_device) for hue_device in hue_devices]
known_devices = [add_device(hue_device) for hue_device in dev_controller]
known_devices += [add_device(hue_room) for hue_room in api.groups.room]
known_devices += [add_device(hue_zone) for hue_zone in api.groups.zone]
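
The bridge-first ordering above relies on sorting by a boolean key: False (0) sorts before True (1), and Python's sort is stable, so bridges float to the front while everything else keeps its relative order. In miniature:

# Boolean sort key: items matching the predicate (key False) come first.
devices = ["lamp", "bridge", "sensor", "bridge"]
devices.sort(key=lambda d: d != "bridge")
assert devices == ["bridge", "bridge", "lamp", "sensor"]
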
@@ -46,7 +46,7 @@
"services": {
"update": {
"name": "Update",
"description": "Asks for a state update of all devices linked to an iCloud account.",
"description": "Updates iCloud devices.",
"fields": {
"account": {
"name": "Account",
@@ -280,7 +280,7 @@ class ImapDataUpdateCoordinator(DataUpdateCoordinator[int | None]):
if self.custom_event_template is not None:
try:
data["custom"] = self.custom_event_template.async_render(
data | {"text": message.text}, parse_result=True
data, parse_result=True
)
_LOGGER.debug(
"IMAP custom template (%s) for msguid %s (%s) rendered to: %s, initial: %s",
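
The template call above extends its variables with `data | {...}`: the PEP 584 dict-union operator builds a new dict without mutating `data`. In isolation:

# Dict union: merge extra template variables without touching the original.
data = {"sender": "a@example.com", "subject": "hi"}
variables = data | {"text": "body text"}
assert "text" not in data            # original left untouched
assert variables["subject"] == "hi"  # existing keys carried over
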
@@ -28,5 +28,5 @@
"dependencies": ["bluetooth_adapters"],
"documentation": "https://www.home-assistant.io/integrations/inkbird",
"iot_class": "local_push",
"requirements": ["inkbird-ble==0.7.1"]
"requirements": ["inkbird-ble==0.7.0"]
}
@@ -111,7 +111,7 @@
},
"services": {
"add_all_link": {
"name": "Add All-Link",
"name": "Add all link",
"description": "Tells the Insteon Modem (IM) start All-Linking mode. Once the IM is in All-Linking mode, press the link button on the device to complete All-Linking.",
"fields": {
"group": {
@@ -120,13 +120,13 @@
},
"mode": {
"name": "[%key:common::config_flow::data::mode%]",
"description": "Linking mode of the Insteon Modem."
"description": "Linking mode controller - IM is controller responder - IM is responder."
}
}
},
"delete_all_link": {
"name": "Delete All-Link",
"description": "Tells the Insteon Modem (IM) to remove an All-Link record from the All-Link database of the IM and a device. Once the IM is set to delete the link, press the link button on the corresponding device to complete the process.",
"name": "Delete all link",
"description": "Tells the Insteon Modem (IM) to remove an All-Link record from the All-Link Database of the IM and a device. Once the IM is set to delete the link, press the link button on the corresponding device to complete the process.",
"fields": {
"group": {
"name": "Group",
@@ -135,8 +135,8 @@
}
},
"load_all_link_database": {
"name": "Load All-Link database",
"description": "Loads the All-Link database for a device. WARNING - Loading a device All-Link database is very time consuming and inconsistent. This may take a LONG time and may need to be repeated to obtain all records.",
"name": "Load all link database",
"description": "Load the All-Link Database for a device. WARNING - Loading a device All-LInk database is very time consuming and inconsistent. This may take a LONG time and may need to be repeated to obtain all records.",
"fields": {
"entity_id": {
"name": "Entity",
@@ -149,8 +149,8 @@
}
},
"print_all_link_database": {
"name": "Print All-Link database",
"description": "Prints the All-Link database for a device. Requires that the All-Link database is loaded into memory.",
"name": "Print all link database",
"description": "Prints the All-Link Database for a device. Requires that the All-Link Database is loaded into memory.",
"fields": {
"entity_id": {
"name": "Entity",
@@ -159,8 +159,8 @@
}
},
"print_im_all_link_database": {
"name": "Print IM All-Link database",
"description": "Prints the All-Link database for the INSTEON Modem (IM)."
"name": "Print IM all link database",
"description": "Prints the All-Link Database for the INSTEON Modem (IM)."
},
"x10_all_units_off": {
"name": "X10 all units off",
@@ -2,15 +2,13 @@

from __future__ import annotations

from collections.abc import Collection
import logging
from typing import Any, Protocol

from aiohttp import web
import voluptuous as vol

from homeassistant.components import http, sensor
from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
from homeassistant.components import http
from homeassistant.components.cover import (
ATTR_POSITION,
DOMAIN as COVER_DOMAIN,
@@ -41,12 +39,7 @@ from homeassistant.const import (
SERVICE_TURN_ON,
)
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, State
from homeassistant.helpers import (
area_registry as ar,
config_validation as cv,
integration_platform,
intent,
)
from homeassistant.helpers import config_validation as cv, integration_platform, intent
from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util

@@ -147,7 +140,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
intent.async_register(hass, GetCurrentDateIntentHandler())
intent.async_register(hass, GetCurrentTimeIntentHandler())
intent.async_register(hass, RespondIntentHandler())
intent.async_register(hass, GetTemperatureIntent())

return True

@@ -452,109 +444,6 @@ class RespondIntentHandler(intent.IntentHandler):
return response


class GetTemperatureIntent(intent.IntentHandler):
"""Handle GetTemperature intents."""

intent_type = intent.INTENT_GET_TEMPERATURE
description = "Gets the current temperature of a climate device or entity"
slot_schema = {
vol.Optional("area"): intent.non_empty_string,
vol.Optional("name"): intent.non_empty_string,
vol.Optional("floor"): intent.non_empty_string,
vol.Optional("preferred_area_id"): cv.string,
vol.Optional("preferred_floor_id"): cv.string,
}
platforms = {CLIMATE_DOMAIN}

async def async_handle(self, intent_obj: intent.Intent) -> intent.IntentResponse:
"""Handle the intent."""
hass = intent_obj.hass
slots = self.async_validate_slots(intent_obj.slots)

name: str | None = None
if "name" in slots:
name = slots["name"]["value"]

area: str | None = None
if "area" in slots:
area = slots["area"]["value"]

floor_name: str | None = None
if "floor" in slots:
floor_name = slots["floor"]["value"]

match_preferences = intent.MatchTargetsPreferences(
area_id=slots.get("preferred_area_id", {}).get("value"),
floor_id=slots.get("preferred_floor_id", {}).get("value"),
)

if (not name) and (area or match_preferences.area_id):
# Look for temperature sensors assigned to an area
area_registry = ar.async_get(hass)
area_temperature_ids: dict[str, str] = {}

# Keep candidates that are registered as area temperature sensors
def area_candidate_filter(
candidate: intent.MatchTargetsCandidate,
possible_area_ids: Collection[str],
) -> bool:
for area_id in possible_area_ids:
temperature_id = area_temperature_ids.get(area_id)
if (temperature_id is None) and (
area_entry := area_registry.async_get_area(area_id)
):
temperature_id = area_entry.temperature_entity_id or ""
area_temperature_ids[area_id] = temperature_id

if candidate.state.entity_id == temperature_id:
return True

return False

match_constraints = intent.MatchTargetsConstraints(
area_name=area,
floor_name=floor_name,
domains=[sensor.DOMAIN],
device_classes=[sensor.SensorDeviceClass.TEMPERATURE],
assistant=intent_obj.assistant,
single_target=True,
)
match_result = intent.async_match_targets(
hass,
match_constraints,
match_preferences,
area_candidate_filter=area_candidate_filter,
)
if match_result.is_match:
# Found temperature sensor
response = intent_obj.create_response()
response.response_type = intent.IntentResponseType.QUERY_ANSWER
response.async_set_states(matched_states=match_result.states)
return response

# Look for climate devices
match_constraints = intent.MatchTargetsConstraints(
name=name,
area_name=area,
floor_name=floor_name,
domains=[CLIMATE_DOMAIN],
assistant=intent_obj.assistant,
single_target=True,
)
match_result = intent.async_match_targets(
hass, match_constraints, match_preferences
)
if not match_result.is_match:
raise intent.MatchFailedError(
result=match_result, constraints=match_constraints
)

response = intent_obj.create_response()
response.response_type = intent.IntentResponseType.QUERY_ANSWER
response.async_set_states(matched_states=match_result.states)
return response


async def _async_process_intent(
hass: HomeAssistant, domain: str, platform: IntentPlatformProtocol
) -> None:
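
The GetTemperatureIntent handler above performs a two-stage lookup: it prefers a dedicated area temperature sensor and falls back to a climate device. A hedged, abstracted sketch of that flow (match() stands in for intent.async_match_targets; nothing here is the real helper API):

# Sketch of the sensor-first, climate-fallback resolution above.
def resolve_temperature_target(match, area: str | None, name: str | None):
    if not name and area:
        result = match(domains=["sensor"], device_class="temperature", area=area)
        if result:
            return result  # a dedicated area temperature sensor wins
    result = match(domains=["climate"], area=area, name=name)
    if not result:
        raise LookupError("no matching temperature target")
    return result
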
@@ -13,6 +13,5 @@
"documentation": "https://www.home-assistant.io/integrations/iron_os",
"iot_class": "local_polling",
"loggers": ["pynecil"],
"quality_scale": "platinum",
"requirements": ["pynecil==4.0.1"]
}
@@ -21,10 +21,8 @@ rules:
entity-unique-id: done
has-entity-name: done
runtime-data: done
test-before-configure:
status: exempt
comment: Device is set up from a Bluetooth discovery
test-before-setup: done
test-before-configure: todo
test-before-setup: todo
unique-config-entry: done

# Silver
@@ -72,9 +70,7 @@ rules:
repair-issues:
status: exempt
comment: no repairs/issues
stale-devices:
status: exempt
comment: Stale devices are removed with the config entry as there is only one device per entry
stale-devices: todo

# Platinum
async-dependency: done
@@ -11,6 +11,7 @@
},
"config_subentries": {
"entity": {
"title": "Add entity",
"step": {
"add_sensor": {
"description": "Configure the new sensor",
@@ -26,12 +27,7 @@
"state": "Initial state"
}
}
},
"initiate_flow": {
"user": "Add sensor",
"reconfigure": "Reconfigure sensor"
},
"entry_type": "Sensor"
}
}
},
"options": {
@@ -28,7 +28,6 @@ from .const import (
_LOGGER = logging.getLogger(__name__)

DATA_COMPONENT: HassKey[EntityComponent[LawnMowerEntity]] = HassKey(DOMAIN)
ENTITY_ID_FORMAT = DOMAIN + ".{}"
PLATFORM_SCHEMA = cv.PLATFORM_SCHEMA
PLATFORM_SCHEMA_BASE = cv.PLATFORM_SCHEMA_BASE
SCAN_INTERVAL = timedelta(seconds=60)
@@ -47,7 +47,6 @@ PLATFORMS = [
Platform.SENSOR,
Platform.SWITCH,
Platform.VACUUM,
Platform.WATER_HEATER,
]

_LOGGER = logging.getLogger(__name__)
@@ -1,201 +0,0 @@
"""Support for waterheater entities."""

from __future__ import annotations

import logging
from typing import Any

from thinqconnect import DeviceType
from thinqconnect.integration import ExtendedProperty

from homeassistant.components.water_heater import (
ATTR_OPERATION_MODE,
STATE_ECO,
STATE_HEAT_PUMP,
STATE_OFF,
STATE_PERFORMANCE,
WaterHeaterEntity,
WaterHeaterEntityDescription,
WaterHeaterEntityFeature,
)
from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import ThinqConfigEntry
from .coordinator import DeviceDataUpdateCoordinator
from .entity import ThinQEntity

DEVICE_TYPE_WH_MAP: dict[DeviceType, WaterHeaterEntityDescription] = {
DeviceType.WATER_HEATER: WaterHeaterEntityDescription(
key=ExtendedProperty.WATER_HEATER,
name=None,
),
DeviceType.SYSTEM_BOILER: WaterHeaterEntityDescription(
key=ExtendedProperty.WATER_BOILER,
name=None,
),
}

# Mapping between device and HA operation modes
DEVICE_OP_MODE_TO_HA = {
"auto": STATE_ECO,
"heat_pump": STATE_HEAT_PUMP,
"turbo": STATE_PERFORMANCE,
"vacation": STATE_OFF,
}
HA_STATE_TO_DEVICE_OP_MODE = {v: k for k, v in DEVICE_OP_MODE_TO_HA.items()}

_LOGGER = logging.getLogger(__name__)


async def async_setup_entry(
hass: HomeAssistant,
entry: ThinqConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up an entry for water_heater platform."""
entities: list[ThinQWaterHeaterEntity] = []
for coordinator in entry.runtime_data.coordinators.values():
if (
description := DEVICE_TYPE_WH_MAP.get(coordinator.api.device.device_type)
) is not None:
if coordinator.api.device.device_type == DeviceType.WATER_HEATER:
entities.append(
ThinQWaterHeaterEntity(
coordinator, description, ExtendedProperty.WATER_HEATER
)
)
elif coordinator.api.device.device_type == DeviceType.SYSTEM_BOILER:
entities.append(
ThinQWaterBoilerEntity(
coordinator, description, ExtendedProperty.WATER_BOILER
)
)
if entities:
async_add_entities(entities)


class ThinQWaterHeaterEntity(ThinQEntity, WaterHeaterEntity):
"""Represent a ThinQ water heater entity."""

def __init__(
self,
coordinator: DeviceDataUpdateCoordinator,
entity_description: WaterHeaterEntityDescription,
property_id: str,
) -> None:
"""Initialize a water_heater entity."""
super().__init__(coordinator, entity_description, property_id)
self._attr_supported_features = (
WaterHeaterEntityFeature.TARGET_TEMPERATURE
| WaterHeaterEntityFeature.OPERATION_MODE
)
self._attr_temperature_unit = (
self._get_unit_of_measurement(self.data.unit) or UnitOfTemperature.CELSIUS
)
if modes := self.data.job_modes:
self._attr_operation_list = [
DEVICE_OP_MODE_TO_HA.get(mode, mode) for mode in modes
]
else:
self._attr_operation_list = [STATE_HEAT_PUMP]

def _update_status(self) -> None:
"""Update status itself."""
super()._update_status()
self._attr_current_temperature = self.data.current_temp
self._attr_target_temperature = self.data.target_temp

if self.data.max is not None:
self._attr_max_temp = self.data.max
if self.data.min is not None:
self._attr_min_temp = self.data.min
if self.data.step is not None:
self._attr_target_temperature_step = self.data.step

self._attr_temperature_unit = (
self._get_unit_of_measurement(self.data.unit) or UnitOfTemperature.CELSIUS
)
if self.data.is_on:
self._attr_current_operation = (
DEVICE_OP_MODE_TO_HA.get(job_mode, job_mode)
if (job_mode := self.data.job_mode) is not None
else STATE_HEAT_PUMP
)
else:
self._attr_current_operation = STATE_OFF

_LOGGER.debug(
"[%s:%s] update status: c:%s, t:%s, op_mode:%s, op_list:%s, is_on:%s",
self.coordinator.device_name,
self.property_id,
self.current_temperature,
self.target_temperature,
self.current_operation,
self.operation_list,
self.data.is_on,
)

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperatures."""
_LOGGER.debug(
"[%s:%s] async_set_temperature: %s",
self.coordinator.device_name,
self.property_id,
kwargs,
)
if (operation_mode := kwargs.get(ATTR_OPERATION_MODE)) is not None:
await self.async_set_operation_mode(str(operation_mode))
if operation_mode == STATE_OFF:
return

if (
temperature := kwargs.get(ATTR_TEMPERATURE)
) is not None and temperature != self.target_temperature:
await self.async_call_api(
self.coordinator.api.async_set_target_temperature(
self.property_id, temperature
)
)

async def async_set_operation_mode(self, operation_mode: str) -> None:
"""Set new operation mode."""
mode = HA_STATE_TO_DEVICE_OP_MODE.get(operation_mode, operation_mode)
_LOGGER.debug(
"[%s:%s] async_set_operation_mode: %s",
self.coordinator.device_name,
self.property_id,
mode,
)
await self.async_call_api(
self.coordinator.api.async_set_job_mode(self.property_id, mode)
)


class ThinQWaterBoilerEntity(ThinQWaterHeaterEntity):
"""Represent a ThinQ water boiler entity."""

def __init__(
self,
coordinator: DeviceDataUpdateCoordinator,
entity_description: WaterHeaterEntityDescription,
property_id: str,
) -> None:
"""Initialize a water_heater entity."""
super().__init__(coordinator, entity_description, property_id)
self._attr_supported_features |= WaterHeaterEntityFeature.ON_OFF

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
_LOGGER.debug(
"[%s:%s] async_turn_on", self.coordinator.device_name, self.property_id
)
await self.async_call_api(self.coordinator.api.async_turn_on(self.property_id))

async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the entity off."""
_LOGGER.debug(
"[%s:%s] async_turn_off", self.coordinator.device_name, self.property_id
)
await self.async_call_api(self.coordinator.api.async_turn_off(self.property_id))
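
The deleted module above derives its reverse lookup (HA_STATE_TO_DEVICE_OP_MODE) with a dict comprehension. The idiom, plus its caveat, in isolation (plain strings stand in for the HA state constants):

# Inverting a mapping only round-trips when it is one-to-one; duplicate
# values would silently collapse to a single key.
DEVICE_OP_MODE_TO_HA = {
    "auto": "eco",
    "heat_pump": "heat_pump",
    "turbo": "performance",
    "vacation": "off",
}
HA_STATE_TO_DEVICE_OP_MODE = {v: k for k, v in DEVICE_OP_MODE_TO_HA.items()}

assert HA_STATE_TO_DEVICE_OP_MODE["performance"] == "turbo"
# .get(mode, mode) passes through modes that have no translation:
assert DEVICE_OP_MODE_TO_HA.get("custom", "custom") == "custom"
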
@@ -66,7 +66,7 @@
}
},
"set_state": {
"name": "Set state",
"name": "Set State",
"description": "Sets a color/brightness and possibly turn the light on/off.",
"fields": {
"infrared": {
@@ -209,11 +209,11 @@
},
"palette": {
"name": "Palette",
"description": "List of at least 2 and at most 16 colors as hue (0-360), saturation (0-100), brightness (0-100) and Kelvin (1500-9000) values to use for this effect. Overrides the 'Theme' attribute."
"description": "List of at least 2 and at most 16 colors as hue (0-360), saturation (0-100), brightness (0-100) and kelvin (1500-9000) values to use for this effect. Overrides the theme attribute."
},
"theme": {
"name": "[%key:component::lifx::entity::select::theme::name%]",
"description": "Predefined color theme to use for the effect. Overridden by the 'Palette' attribute."
"description": "Predefined color theme to use for the effect. Overridden by the palette attribute."
},
"power_on": {
"name": "Power on",
@@ -243,7 +243,7 @@
},
"palette": {
"name": "Palette",
"description": "List of 1 to 6 colors as hue (0-360), saturation (0-100), brightness (0-100) and Kelvin (1500-9000) values to use for this effect."
"description": "List of 1 to 6 colors as hue (0-360), saturation (0-100), brightness (0-100) and kelvin (1500-9000) values to use for this effect."
},
"power_on": {
"name": "Power on",
@@ -256,16 +256,16 @@
"description": "Stops a running effect."
},
"paint_theme": {
"name": "Paint theme",
"description": "Paints either a provided theme or custom palette across one or more LIFX lights.",
"name": "Paint Theme",
"description": "Paint either a provided theme or custom palette across one or more LIFX lights.",
"fields": {
"palette": {
"name": "Palette",
"description": "List of at least 2 and at most 16 colors as hue (0-360), saturation (0-100), brightness (0-100) and Kelvin (1500-9000) values to paint across the target lights. Overrides the 'Theme' attribute."
"description": "List of at least 2 and at most 16 colors as hue (0-360), saturation (0-100), brightness (0-100) and kelvin (1500-9000) values to paint across the target lights. Overrides the theme attribute."
},
"theme": {
"name": "[%key:component::lifx::entity::select::theme::name%]",
"description": "Predefined color theme to paint. Overridden by the 'Palette' attribute."
"description": "Predefined color theme to paint. Overridden by the palette attribute."
},
"transition": {
"name": "Transition",
@@ -1 +0,0 @@
"""LinkedGo virtual integration."""
@@ -1,6 +0,0 @@
{
"domain": "linkedgo",
"name": "LinkedGo",
"integration_type": "virtual",
"supported_by": "shelly"
}
@@ -53,12 +53,12 @@
},
"services": {
"set_hold_time": {
"name": "Set hold time",
"description": "Sets the time period to keep the temperature and override the schedule.",
"name": "Set Hold Time",
"description": "Sets the time to hold until.",
"fields": {
"time_period": {
"name": "Time period",
"description": "Duration for which to override the schedule."
"name": "Time Period",
"description": "Time to hold until."
}
}
}
@@ -8,6 +8,6 @@
"iot_class": "calculated",
"loggers": ["yt_dlp"],
"quality_scale": "internal",
"requirements": ["yt-dlp[default]==2025.02.19"],
"requirements": ["yt-dlp[default]==2025.01.26"],
"single_config_entry": true
}
@@ -17,12 +17,12 @@
},
"media_content_type": {
"name": "Media content type",
"description": "The type of the content to play."
"description": "The type of the content to play. Must be one of MUSIC, TVSHOW, VIDEO, EPISODE, CHANNEL or PLAYLIST MUSIC."
}
}
},
"extract_media_url": {
"name": "Get media URL",
"name": "Get Media URL",
"description": "Extract media URL from a service.",
"fields": {
"url": {
Some files were not shown because too many files have changed in this diff.