Compare commits

...

62 Commits

Author SHA1 Message Date
Martin Hjelmare
cdda624d8a Bump zwave-js-server-python to 0.68.0 2026-01-14 11:15:55 +01:00
dependabot[bot]
20102cd83f Bump j178/prek-action from 1.0.11 to 1.0.12 (#160902)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-14 10:28:11 +01:00
Erik Montnemery
6d6324dae5 Fix some reversed asserts in sensor group tests (#160905) 2026-01-14 09:43:26 +01:00
Erik Montnemery
2ee5410a6c Remove set of _attr_extra_state_attributes in sensor group (#160846)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2026-01-14 09:21:54 +01:00
Erik Montnemery
56f02a41ca Adjust sensor group behavior (#152167) 2026-01-14 08:23:34 +01:00
Erwin Douna
d43102de1b Bump pyportainer 1.0.23 (#160878) 2026-01-14 07:09:35 +01:00
Ludovic BOUÉ
2bcd02b296 Add MatterOutdoorTemperature attribute to Matter binary sensor discovery schema only if OutdoorTemperature exists (#160879) 2026-01-14 06:58:55 +01:00
Brett Adams
ad11c72488 Add retry logic to Teslemetry coordinators (#160756)
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
2026-01-14 01:36:43 +01:00
Manu
ddfa6f83c3 Refactor Namecheap DNS update logic to use a coordinator (#160863) 2026-01-14 01:34:27 +01:00
epenet
85baf7a41d Improve type hints in mobile_app notify (#160853)
Co-authored-by: Jan Bouwhuis <jbouwh@users.noreply.github.com>
2026-01-14 01:26:10 +01:00
epenet
bf4d5a0bab Improve type hints in telegram notify (#160855) 2026-01-14 01:26:00 +01:00
Erwin Douna
16527ba707 Melcloud small config flow refactor (#160892) 2026-01-14 01:15:36 +01:00
Brett Adams
0612ea4ee8 Bump tesla-fleet-api to 1.4.2 (#159616)
Co-authored-by: Claude <noreply@anthropic.com>
2026-01-14 01:14:58 +01:00
Ville Skyttä
9e842152f7 Upgrade prettier-plugin-sort-json to 4.2.0 (#160894) 2026-01-14 01:13:16 +01:00
Erwin Douna
63e79c3639 Firefly III add asyncio.gather pattern (#160886) 2026-01-14 01:12:44 +01:00
Erwin Douna
d0e4a7fa75 Melcloud Pythonic refactor init (#160891) 2026-01-14 00:38:41 +01:00
Glenn de Haan
815976b9a4 Add HDFury sensor platform (#160628) 2026-01-14 00:35:48 +01:00
scheric
86a5cc5edb Add keep_alive to generic_thermostat config flow (#156641)
Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
2026-01-13 23:20:40 +00:00
Björn Ebbinghaus
3ebc08c5ec Prefer explicit DeviceClass over hint in entity_id in homekit (#152507)
Co-authored-by: Abílio Costa <abmantis@users.noreply.github.com>
2026-01-13 23:00:58 +00:00
Paul Bottein
1bcbebb00c Use config entity category for Matter door lock operating mode (#160507) 2026-01-13 23:46:54 +01:00
Jan Bouwhuis
2895225552 Improve test coverage on mobile app legacy notify service action (#160869) 2026-01-13 22:39:01 +01:00
Erwin Douna
f4f772ea31 Bump pyfirefly 0.1.11 (#160877) 2026-01-13 22:37:32 +01:00
Manu
66f60e6757 Add reconfigure flow to Namecheap integration (#160870) 2026-01-13 19:47:50 +00:00
Lukas
72d299f088 Mark pooldose as strictly typed (#160779)
Co-authored-by: epenet <6771947+epenet@users.noreply.github.com>
2026-01-13 19:40:52 +00:00
Thomas55555
9c66561381 Make pollutants dynamic in Google Air Quality (#160747) 2026-01-13 19:28:41 +00:00
Erik Montnemery
e762f839fa Improve sensor group tests (#160854) 2026-01-13 20:16:06 +01:00
Joost Lekkerkerker
0c9d97c89f Unmark integrations with a config flow as legacy (#160861) 2026-01-13 19:59:39 +01:00
Robert Resch
fb3ee34c81 Bump prek to 0.2.28 (#160864) 2026-01-13 18:59:07 +01:00
Daniel Hjelseth Høyer
cb99400128 Add Tibber binary sensors (#160365)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Co-authored-by: Joost Lekkerkerker <joostlek@outlook.com>
2026-01-13 18:56:14 +01:00
divers33
58ef925a07 Refactor MELCloud integration to use DataUpdateCoordinator (#160131)
Co-authored-by: divers33 <divers33@users.noreply.github.com>
Co-authored-by: Claude Opus 4.5 <noreply@anthropic.com>
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-01-13 18:52:37 +01:00
Paul Tarjan
41bbfb8725 Add camera platform support to Hikvision integration (#160252)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-01-13 18:38:18 +01:00
Manu
ed226e31b1 Remove defusedxml dependency from Namecheap DynamicDNS integration (#160656) 2026-01-13 18:16:50 +01:00
Robert Resch
e900bb9770 Add support for packaging version >= 26 on the version bump script (#160858) 2026-01-13 18:14:46 +01:00
Matthias Alphart
d173d25072 Refactor KNX expose entity class (#160705) 2026-01-13 17:25:46 +01:00
Colin
0959896984 openevse: Use a data update coordinator (#160757)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-01-13 17:04:56 +01:00
epenet
4a3ae454b8 Improve type hints in pushsafer notify (#160851) 2026-01-13 16:46:01 +01:00
Joost Lekkerkerker
f2cf6b69bf Use extended entity descriptions in openevse (#160611) 2026-01-13 16:44:29 +01:00
epenet
176f847ebb Split Tuya climate wrappers (#160839) 2026-01-13 16:38:40 +01:00
epenet
277419aafb Fix logging in mycroft notify (#160852) 2026-01-13 16:28:17 +01:00
Willem-Jan van Rootselaar
d2b8d165d7 Optimize BSB-Lan integration startup (#160784) 2026-01-13 16:07:33 +01:00
Jamin
bf74e67700 Bump voip-utils to 0.3.5 (#160848) 2026-01-13 16:03:55 +01:00
Chris
5c3b85a37a Add authentication to config flow in openevse (#160521)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-01-13 16:03:40 +01:00
Manu
8543f3f989 Add config flow to Namecheap DynamicDNS integration (#160841) 2026-01-13 15:46:15 +01:00
Sebastian YEPES
52a8a66a91 Bump qingping-ble to 1.1.0 (#160815) 2026-01-13 15:35:50 +01:00
dependabot[bot]
002a931e70 Bump github/codeql-action from 4.31.9 to 4.31.10 (#160829)
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-01-13 15:33:27 +01:00
Daniel Hjelseth Høyer
0667bfc81d Remove old migration for Tibber (#160845) 2026-01-13 15:31:28 +01:00
Michael Hansen
329b2c840d Revert back to microVAD (#160821) 2026-01-13 08:09:17 -06:00
Robert Resch
ea7e94bcc1 Replace pre-commit by prek (#160427) 2026-01-13 15:09:02 +01:00
nasWebio
cc30add73a Add climate platform to NASweb integration (#141583)
Co-authored-by: Erik Montnemery <erik@montnemery.com>
2026-01-13 14:55:12 +01:00
Simone Chemelli
21cfb9a0e5 Add guest Wi-Fi QR code for Vodafone Station (#160307)
Co-authored-by: Joostlek <joostlek@outlook.com>
2026-01-13 13:57:59 +01:00
Erik Montnemery
143eadd887 Remove progress_step date entry flow decorator (#160844) 2026-01-13 13:52:57 +01:00
Erik Montnemery
855da1d070 Adjust light condition test (#160831) 2026-01-13 10:58:34 +01:00
AlCalzone
d5be76d7e6 Make integration scaffolding a bit more newbie-friendly (#160837) 2026-01-13 10:39:49 +01:00
Matthias Alphart
5f396332df Update xknx to 3.14.0 (#160813) 2026-01-13 10:22:49 +01:00
Kevin Stillhammer
56e638e170 accept leading zeros in sms_code for fressnapf_tracker (#160834) 2026-01-13 10:18:15 +01:00
Norbert Rittel
52b90c7706 Make light conditions consistent with triggers and actions (#160477) 2026-01-13 09:45:31 +01:00
Erik Montnemery
a6221d16b6 Add helper for creating entity condition tests (#160425) 2026-01-13 08:25:41 +01:00
tronikos
51701cab7c Bump opower to 0.16.2 (#160822) 2026-01-12 19:20:06 -08:00
Raphael Hehl
010e1f2d0d Bump uiprotect to 8.1.1 (#160816) 2026-01-12 23:06:50 +01:00
Jonathan de Jong
66909fc9ca Support HVAC mode in set temperature calls in Mill (#155416)
Co-authored-by: Martin Hjelmare <marhje52@gmail.com>
2026-01-12 21:46:20 +01:00
Lukas
90a28c95c8 Bump python-pooldose to 0.8.2 (#160800) 2026-01-12 20:20:33 +01:00
Erik Montnemery
83f2c53e8c Disable pyright type checking in VS Code (#160528) 2026-01-12 20:19:19 +01:00
163 changed files with 6001 additions and 1811 deletions

View File

@@ -40,7 +40,8 @@
"python.terminal.activateEnvInCurrentTerminal": true,
"python.testing.pytestArgs": ["--no-cov"],
"pylint.importStrategy": "fromEnvironment",
"python.analysis.typeCheckingMode": "basic",
// Pyright type checking is not compatible with mypy which Home Assistant uses for type checking
"python.analysis.typeCheckingMode": "off",
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"editor.formatOnType": true,

View File

@@ -847,8 +847,8 @@ rules:
## Development Commands
### Code Quality & Linting
- **Run all linters on all files**: `pre-commit run --all-files`
- **Run linters on staged files only**: `pre-commit run`
- **Run all linters on all files**: `prek run --all-files`
- **Run linters on staged files only**: `prek run`
- **PyLint on everything** (slow): `pylint homeassistant`
- **PyLint on specific folder**: `pylint homeassistant/components/my_integration`
- **MyPy type checking (whole project)**: `mypy homeassistant/`

View File

@@ -59,7 +59,6 @@ env:
# 15 is the latest version
# - 15.2 is the latest (as of 9 Feb 2023)
POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
PRE_COMMIT_CACHE: ~/.cache/pre-commit
UV_CACHE_DIR: /tmp/uv-cache
APT_CACHE_BASE: /home/runner/work/apt
APT_CACHE_DIR: /home/runner/work/apt/cache
@@ -83,7 +82,6 @@ jobs:
integrations_glob: ${{ steps.info.outputs.integrations_glob }}
integrations: ${{ steps.integrations.outputs.changes }}
apt_cache_key: ${{ steps.generate_apt_cache_key.outputs.key }}
pre-commit_cache_key: ${{ steps.generate_pre-commit_cache_key.outputs.key }}
python_cache_key: ${{ steps.generate_python_cache_key.outputs.key }}
requirements: ${{ steps.core.outputs.requirements }}
mariadb_groups: ${{ steps.info.outputs.mariadb_groups }}
@@ -111,11 +109,6 @@ jobs:
hashFiles('requirements_all.txt') }}-${{
hashFiles('homeassistant/package_constraints.txt') }}-${{
hashFiles('script/gen_requirements_all.py') }}" >> $GITHUB_OUTPUT
- name: Generate partial pre-commit restore key
id: generate_pre-commit_cache_key
run: >-
echo "key=pre-commit-${{ env.CACHE_VERSION }}-${{
hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
- name: Generate partial apt restore key
id: generate_apt_cache_key
run: |
@@ -244,8 +237,8 @@ jobs:
echo "skip_coverage: ${skip_coverage}"
echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT
pre-commit:
name: Prepare pre-commit base
prek:
name: Run prek checks
runs-on: *runs-on-ubuntu
needs: [info]
if: |
@@ -254,147 +247,23 @@ jobs:
&& github.event.inputs.audit-licenses-only != 'true'
steps:
- *checkout
- &setup-python-default
name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- name: Restore base Python virtual environment
id: cache-venv
uses: &actions-cache actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: venv
key: &key-pre-commit-venv >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Create Python virtual environment
if: steps.cache-venv.outputs.cache-hit != 'true'
run: |
python -m venv venv
. venv/bin/activate
python --version
pip install "$(grep '^uv' < requirements.txt)"
uv pip install "$(cat requirements_test.txt | grep pre-commit)"
- name: Restore pre-commit environment from cache
id: cache-precommit
uses: *actions-cache
with:
path: ${{ env.PRE_COMMIT_CACHE }}
lookup-only: true
key: &key-pre-commit-env >-
${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
needs.info.outputs.pre-commit_cache_key }}
- name: Install pre-commit dependencies
if: steps.cache-precommit.outputs.cache-hit != 'true'
run: |
. venv/bin/activate
pre-commit install-hooks
lint-ruff-format:
name: Check ruff-format
runs-on: *runs-on-ubuntu
needs: &needs-pre-commit
- info
- pre-commit
steps:
- *checkout
- *setup-python-default
- &cache-restore-pre-commit-venv
name: Restore base Python virtual environment
id: cache-venv
uses: &actions-cache-restore actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: venv
fail-on-cache-miss: true
key: *key-pre-commit-venv
- &cache-restore-pre-commit-env
name: Restore pre-commit environment from cache
id: cache-precommit
uses: *actions-cache-restore
with:
path: ${{ env.PRE_COMMIT_CACHE }}
fail-on-cache-miss: true
key: *key-pre-commit-env
- name: Run ruff-format
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
lint-ruff:
name: Check ruff
runs-on: *runs-on-ubuntu
needs: *needs-pre-commit
steps:
- *checkout
- *setup-python-default
- *cache-restore-pre-commit-venv
- *cache-restore-pre-commit-env
- name: Run ruff
run: |
. venv/bin/activate
pre-commit run --hook-stage manual ruff-check --all-files --show-diff-on-failure
env:
RUFF_OUTPUT_FORMAT: github
lint-other:
name: Check other linters
runs-on: *runs-on-ubuntu
needs: *needs-pre-commit
steps:
- *checkout
- *setup-python-default
- *cache-restore-pre-commit-venv
- *cache-restore-pre-commit-env
- name: Register yamllint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/yamllint.json"
- name: Run yamllint
run: |
. venv/bin/activate
pre-commit run --hook-stage manual yamllint --all-files --show-diff-on-failure
- name: Register check-json problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/check-json.json"
- name: Run check-json
run: |
. venv/bin/activate
pre-commit run --hook-stage manual check-json --all-files --show-diff-on-failure
- name: Run prettier (fully)
if: needs.info.outputs.test_full_suite == 'true'
run: |
. venv/bin/activate
pre-commit run --hook-stage manual prettier --all-files --show-diff-on-failure
- name: Run prettier (partially)
if: needs.info.outputs.test_full_suite == 'false'
shell: bash
run: |
. venv/bin/activate
shopt -s globstar
pre-commit run --hook-stage manual prettier --show-diff-on-failure --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*}
- name: Register check executables problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
- name: Run executables check
run: |
. venv/bin/activate
pre-commit run --hook-stage manual check-executables-have-shebangs --all-files --show-diff-on-failure
- name: Register codespell problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/codespell.json"
- name: Run codespell
run: |
. venv/bin/activate
pre-commit run --show-diff-on-failure --hook-stage manual codespell --all-files
- name: Run prek
uses: j178/prek-action@9d6a3097e0c1865ecce00cfb89fe80f2ee91b547 # v1.0.12
env:
PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config
RUFF_OUTPUT_FORMAT: github
lint-hadolint:
name: Check ${{ matrix.file }}
@@ -434,7 +303,7 @@ jobs:
- &setup-python-matrix
name: Set up Python ${{ matrix.python-version }}
id: python
uses: *actions-setup-python
uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
with:
python-version: ${{ matrix.python-version }}
check-latest: true
@@ -447,7 +316,7 @@ jobs:
env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
- name: Restore base Python virtual environment
id: cache-venv
uses: *actions-cache
uses: &actions-cache actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: venv
key: &key-python-venv >-
@@ -562,7 +431,7 @@ jobs:
steps:
- &cache-restore-apt
name: Restore apt cache
uses: *actions-cache-restore
uses: &actions-cache-restore actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
with:
path: *path-apt-cache
fail-on-cache-miss: true
@@ -579,7 +448,13 @@ jobs:
-o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
libturbojpeg
- *checkout
- *setup-python-default
- &setup-python-default
name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: *actions-setup-python
with:
python-version: ${{ env.DEFAULT_PYTHON }}
check-latest: true
- &cache-restore-python-default
name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
id: cache-venv
@@ -782,9 +657,7 @@ jobs:
- base
- gen-requirements-all
- hassfest
- lint-other
- lint-ruff
- lint-ruff-format
- prek
- mypy
steps:
- *cache-restore-apt
@@ -823,9 +696,7 @@ jobs:
- base
- gen-requirements-all
- hassfest
- lint-other
- lint-ruff
- lint-ruff-format
- prek
- mypy
- prepare-pytest-full
if: |
@@ -949,9 +820,7 @@ jobs:
- base
- gen-requirements-all
- hassfest
- lint-other
- lint-ruff
- lint-ruff-format
- prek
- mypy
if: |
needs.info.outputs.lint_only != 'true'
@@ -1066,9 +935,7 @@ jobs:
- base
- gen-requirements-all
- hassfest
- lint-other
- lint-ruff
- lint-ruff-format
- prek
- mypy
if: |
needs.info.outputs.lint_only != 'true'
@@ -1202,9 +1069,7 @@ jobs:
- base
- gen-requirements-all
- hassfest
- lint-other
- lint-ruff
- lint-ruff-format
- prek
- mypy
if: |
needs.info.outputs.lint_only != 'true'

View File

@@ -24,11 +24,11 @@ jobs:
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- name: Initialize CodeQL
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
uses: github/codeql-action/init@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
with:
languages: python
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
uses: github/codeql-action/analyze@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
with:
category: "/language:python"

View File

@@ -39,14 +39,14 @@ repos:
- id: prettier
additional_dependencies:
- prettier@3.6.2
- prettier-plugin-sort-json@4.1.1
- prettier-plugin-sort-json@4.2.0
- repo: https://github.com/cdce8p/python-typing-update
rev: v0.6.0
hooks:
# Run `python-typing-update` hook manually from time to time
# to update python typing syntax.
# Will require manual work, before submitting changes!
# pre-commit run --hook-stage manual python-typing-update --all-files
# prek run --hook-stage manual python-typing-update --all-files
- id: python-typing-update
stages: [manual]
args:

View File

@@ -407,6 +407,7 @@ homeassistant.components.person.*
homeassistant.components.pi_hole.*
homeassistant.components.ping.*
homeassistant.components.plugwise.*
homeassistant.components.pooldose.*
homeassistant.components.portainer.*
homeassistant.components.powerfox.*
homeassistant.components.powerwall.*

View File

@@ -7,8 +7,8 @@
"python.testing.pytestEnabled": false,
// https://code.visualstudio.com/docs/python/linting#_general-settings
"pylint.importStrategy": "fromEnvironment",
// Pyright is too pedantic for Home Assistant
"python.analysis.typeCheckingMode": "basic",
// Pyright type checking is not compatible with mypy which Home Assistant uses for type checking
"python.analysis.typeCheckingMode": "off",
"[python]": {
"editor.defaultFormatter": "charliermarsh.ruff",
},

.vscode/tasks.json vendored
View File

@@ -45,7 +45,7 @@
{
"label": "Ruff",
"type": "shell",
"command": "pre-commit run ruff-check --all-files",
"command": "prek run ruff-check --all-files",
"group": {
"kind": "test",
"isDefault": true
@@ -57,9 +57,9 @@
"problemMatcher": []
},
{
"label": "Pre-commit",
"label": "Prek",
"type": "shell",
"command": "pre-commit run --show-diff-on-failure",
"command": "prek run --show-diff-on-failure",
"group": {
"kind": "test",
"isDefault": true

CODEOWNERS generated
View File

@@ -1068,6 +1068,8 @@ build.json @home-assistant/supervisor
/tests/components/myuplink/ @pajzo @astrandb
/homeassistant/components/nam/ @bieniu
/tests/components/nam/ @bieniu
/homeassistant/components/namecheapdns/ @tr4nt0r
/tests/components/namecheapdns/ @tr4nt0r
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
/tests/components/nanoleaf/ @milanmeu @joostlek
/homeassistant/components/nasweb/ @nasWebio

View File

@@ -3,9 +3,8 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
import logging
import math
from pysilero_vad import SileroVoiceActivityDetector
from pymicro_vad import MicroVad
from pyspeex_noise import AudioProcessor
from .const import BYTES_PER_CHUNK
@@ -43,8 +42,8 @@ class AudioEnhancer(ABC):
"""Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples."""
class SileroVadSpeexEnhancer(AudioEnhancer):
"""Audio enhancer that runs Silero VAD and speex."""
class MicroVadSpeexEnhancer(AudioEnhancer):
"""Audio enhancer that runs microVAD and speex."""
def __init__(
self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool
@@ -70,49 +69,21 @@ class SileroVadSpeexEnhancer(AudioEnhancer):
self.noise_suppression,
)
self.vad: SileroVoiceActivityDetector | None = None
# We get 10ms chunks but Silero works on 32ms chunks, so we have to
# buffer audio. The previous speech probability is used until enough
# audio has been buffered.
self._vad_buffer: bytearray | None = None
self._vad_buffer_chunks = 0
self._vad_buffer_chunk_idx = 0
self._last_speech_probability: float | None = None
self.vad: MicroVad | None = None
if self.is_vad_enabled:
self.vad = SileroVoiceActivityDetector()
# VAD buffer is a multiple of 10ms, but Silero VAD needs 32ms.
self._vad_buffer_chunks = int(
math.ceil(self.vad.chunk_bytes() / BYTES_PER_CHUNK)
)
self._vad_leftover_bytes = self.vad.chunk_bytes() - BYTES_PER_CHUNK
self._vad_buffer = bytearray(self.vad.chunk_bytes())
_LOGGER.debug("Initialized Silero VAD")
self.vad = MicroVad()
_LOGGER.debug("Initialized microVAD")
def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
"""Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
speech_probability: float | None = None
assert len(audio) == BYTES_PER_CHUNK
if self.vad is not None:
# Run VAD
assert self._vad_buffer is not None
start_idx = self._vad_buffer_chunk_idx * BYTES_PER_CHUNK
self._vad_buffer[start_idx : start_idx + BYTES_PER_CHUNK] = audio
self._vad_buffer_chunk_idx += 1
if self._vad_buffer_chunk_idx >= self._vad_buffer_chunks:
# We have enough data to run Silero VAD (32 ms)
self._last_speech_probability = self.vad.process_chunk(
self._vad_buffer[: self.vad.chunk_bytes()]
)
# Copy leftover audio that wasn't processed to start
self._vad_buffer[: self._vad_leftover_bytes] = self._vad_buffer[
-self._vad_leftover_bytes :
]
self._vad_buffer_chunk_idx = 0
speech_probability = self.vad.Process10ms(audio)
if self.audio_processor is not None:
# Run noise suppression and auto gain
@@ -121,5 +92,5 @@ class SileroVadSpeexEnhancer(AudioEnhancer):
return EnhancedAudioChunk(
audio=audio,
timestamp_ms=timestamp_ms,
speech_probability=self._last_speech_probability,
speech_probability=speech_probability,
)
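The replacement enhancer feeds each 10 ms chunk straight into pymicro_vad instead of buffering 32 ms for Silero. A minimal standalone sketch of the underlying call, assuming 16 kHz 16-bit mono PCM, which works out to 320 bytes per 10 ms chunk (the MicroVad constructor and Process10ms call are as used in the hunk above):

from pymicro_vad import MicroVad

vad = MicroVad()
chunk = bytes(320)  # assumed: one 10 ms chunk of 16 kHz, 16-bit mono PCM (all silence here)
speech_probability = vad.Process10ms(chunk)  # per-chunk speech probability, as used above
print(speech_probability)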

View File

@@ -8,5 +8,5 @@
"integration_type": "system",
"iot_class": "local_push",
"quality_scale": "internal",
"requirements": ["pysilero-vad==3.2.0", "pyspeex-noise==1.0.2"]
"requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.2"]
}

View File

@@ -55,7 +55,7 @@ from homeassistant.util import (
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.limited_size_dict import LimitedSizeDict
from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, SileroVadSpeexEnhancer
from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
from .const import (
ACKNOWLEDGE_PATH,
BYTES_PER_CHUNK,
@@ -633,7 +633,7 @@ class PipelineRun:
# Initialize with audio settings
if self.audio_settings.needs_processor and (self.audio_enhancer is None):
# Default audio enhancer
self.audio_enhancer = SileroVadSpeexEnhancer(
self.audio_enhancer = MicroVadSpeexEnhancer(
self.audio_settings.auto_gain_dbfs,
self.audio_settings.noise_suppression_level,
self.audio_settings.is_vad_enabled,

View File

@@ -1,5 +1,6 @@
"""The BSB-Lan integration."""
import asyncio
import dataclasses
from bsblan import (
@@ -77,12 +78,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
bsblan = BSBLAN(config, session)
try:
# Initialize the client first - this sets up internal caches and validates the connection
# Initialize the client first - this sets up internal caches and validates
# the connection by fetching firmware version
await bsblan.initialize()
# Fetch all required device metadata
device = await bsblan.device()
info = await bsblan.info()
static = await bsblan.static_values()
# Fetch device metadata in parallel for faster startup
device, info, static = await asyncio.gather(
bsblan.device(),
bsblan.info(),
bsblan.static_values(),
)
except BSBLANConnectionError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
@@ -110,10 +115,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
fast_coordinator = BSBLanFastCoordinator(hass, entry, bsblan)
slow_coordinator = BSBLanSlowCoordinator(hass, entry, bsblan)
# Perform first refresh of both coordinators
# Perform first refresh of fast coordinator (required for entities)
await fast_coordinator.async_config_entry_first_refresh()
# Try to refresh slow coordinator, but don't fail if DHW is not available
# Refresh slow coordinator - don't fail if DHW is not available
# This allows the integration to work even if the device doesn't support DHW
await slow_coordinator.async_refresh()

View File

@@ -2,7 +2,6 @@
from dataclasses import dataclass
from datetime import timedelta
from random import randint
from bsblan import (
BSBLAN,
@@ -23,6 +22,17 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda
from .const import DOMAIN, LOGGER, SCAN_INTERVAL_FAST, SCAN_INTERVAL_SLOW
# Filter lists for optimized API calls - only fetch parameters we actually use
# This significantly reduces response time (~0.2s per parameter saved)
STATE_INCLUDE = ["current_temperature", "target_temperature", "hvac_mode"]
SENSOR_INCLUDE = ["current_temperature", "outside_temperature"]
DHW_STATE_INCLUDE = [
"operating_mode",
"nominal_setpoint",
"dhw_actual_value_top_temperature",
]
DHW_CONFIG_INCLUDE = ["reduced_setpoint", "nominal_setpoint_max"]
@dataclass
class BSBLanFastData:
@@ -80,26 +90,18 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
config_entry,
client,
name=f"{DOMAIN}_fast_{config_entry.data[CONF_HOST]}",
update_interval=self._get_update_interval(),
update_interval=SCAN_INTERVAL_FAST,
)
def _get_update_interval(self) -> timedelta:
"""Get the update interval with a random offset.
Add a random number of seconds to avoid timeouts when
the BSB-Lan device is already/still busy retrieving data,
e.g. for MQTT or internal logging.
"""
return SCAN_INTERVAL_FAST + timedelta(seconds=randint(1, 8))
async def _async_update_data(self) -> BSBLanFastData:
"""Fetch fast-changing data from the BSB-Lan device."""
try:
# Client is already initialized in async_setup_entry
# Fetch fast-changing data (state, sensor, DHW state)
state = await self.client.state()
sensor = await self.client.sensor()
dhw = await self.client.hot_water_state()
# Use include filtering to only fetch parameters we actually use
# This reduces response time significantly (~0.2s per parameter)
state = await self.client.state(include=STATE_INCLUDE)
sensor = await self.client.sensor(include=SENSOR_INCLUDE)
dhw = await self.client.hot_water_state(include=DHW_STATE_INCLUDE)
except BSBLANAuthError as err:
raise ConfigEntryAuthFailed(
@@ -111,9 +113,6 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
f"Error while establishing connection with BSB-Lan device at {host}"
) from err
# Update the interval with random jitter for next update
self.update_interval = self._get_update_interval()
return BSBLanFastData(
state=state,
sensor=sensor,
@@ -143,8 +142,8 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
"""Fetch slow-changing data from the BSB-Lan device."""
try:
# Client is already initialized in async_setup_entry
# Fetch slow-changing configuration data
dhw_config = await self.client.hot_water_config()
# Use include filtering to only fetch parameters we actually use
dhw_config = await self.client.hot_water_config(include=DHW_CONFIG_INCLUDE)
dhw_schedule = await self.client.hot_water_schedule()
except AttributeError:

View File

@@ -7,6 +7,5 @@
"integration_type": "service",
"iot_class": "local_push",
"loggers": ["datadog"],
"quality_scale": "legacy",
"requirements": ["datadog==0.52.0"]
}

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
import asyncio
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
@@ -98,16 +99,29 @@ class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]
try:
accounts = await self.firefly.get_accounts()
categories = await self.firefly.get_categories()
category_details = [
await self.firefly.get_category(
category_id=int(category.id), start=start_date, end=end_date
(
categories,
primary_currency,
budgets,
bills,
) = await asyncio.gather(
self.firefly.get_categories(),
self.firefly.get_currency_primary(),
self.firefly.get_budgets(start=start_date, end=end_date),
self.firefly.get_bills(),
)
category_details = await asyncio.gather(
*(
self.firefly.get_category(
category_id=int(category.id),
start=start_date,
end=end_date,
)
for category in categories
)
for category in categories
]
primary_currency = await self.firefly.get_currency_primary()
budgets = await self.firefly.get_budgets(start=start_date, end=end_date)
bills = await self.firefly.get_bills()
)
except FireflyAuthenticationError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,

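A condensed sketch of the concurrent fetch pattern introduced here: the independent top-level requests run in one asyncio.gather, then the per-category detail requests fan out in a second gather. The pyfirefly method names are taken from the hunk above; the surrounding coordinator and error handling are omitted.

import asyncio


async def _fetch(firefly, start_date, end_date):
    # Independent top-level requests run concurrently instead of one after another
    categories, primary_currency, budgets, bills = await asyncio.gather(
        firefly.get_categories(),
        firefly.get_currency_primary(),
        firefly.get_budgets(start=start_date, end=end_date),
        firefly.get_bills(),
    )
    # One detail request per category, also dispatched in parallel
    category_details = await asyncio.gather(
        *(
            firefly.get_category(category_id=int(c.id), start=start_date, end=end_date)
            for c in categories
        )
    )
    return categories, primary_currency, budgets, bills, category_details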
View File

@@ -7,5 +7,5 @@
"integration_type": "service",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyfirefly==0.1.10"]
"requirements": ["pyfirefly==0.1.11"]
}

View File

@@ -31,7 +31,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
)
STEP_SMS_CODE_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_SMS_CODE): int,
vol.Required(CONF_SMS_CODE): str,
}
)
@@ -75,7 +75,7 @@ class FressnapfTrackerConfigFlow(ConfigFlow, domain=DOMAIN):
return errors, False
async def _async_verify_sms_code(
self, sms_code: int
self, sms_code: str
) -> tuple[dict[str, str], str | None]:
"""Verify SMS code and return errors and access_token."""
errors: dict[str, str] = {}
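The schema and the helper now keep the SMS code as a string; a one-line illustration of why an integer type cannot represent codes with leading zeros:

sms_code = "012345"
print(int(sms_code))  # 12345 - converting to int silently drops the leading zero
print(sms_code)       # '012345' survives when the flow keeps it as str, as it now does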

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["fressnapftracker==0.2.0"]
"requirements": ["fressnapftracker==0.2.1"]
}

View File

@@ -66,6 +66,7 @@ from .const import (
CONF_COLD_TOLERANCE,
CONF_HEATER,
CONF_HOT_TOLERANCE,
CONF_KEEP_ALIVE,
CONF_MAX_TEMP,
CONF_MIN_DUR,
CONF_MIN_TEMP,
@@ -81,7 +82,6 @@ _LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Generic Thermostat"
CONF_INITIAL_HVAC_MODE = "initial_hvac_mode"
CONF_KEEP_ALIVE = "keep_alive"
CONF_PRECISION = "precision"
CONF_TARGET_TEMP = "target_temp"
CONF_TEMP_STEP = "target_temp_step"

View File

@@ -21,6 +21,7 @@ from .const import (
CONF_COLD_TOLERANCE,
CONF_HEATER,
CONF_HOT_TOLERANCE,
CONF_KEEP_ALIVE,
CONF_MAX_TEMP,
CONF_MIN_DUR,
CONF_MIN_TEMP,
@@ -59,6 +60,9 @@ OPTIONS_SCHEMA = {
vol.Optional(CONF_MIN_DUR): selector.DurationSelector(
selector.DurationSelectorConfig(allow_negative=False)
),
vol.Optional(CONF_KEEP_ALIVE): selector.DurationSelector(
selector.DurationSelectorConfig(allow_negative=False)
),
vol.Optional(CONF_MIN_TEMP): selector.NumberSelector(
selector.NumberSelectorConfig(
mode=selector.NumberSelectorMode.BOX, unit_of_measurement=DEGREE, step=0.1

View File

@@ -33,4 +33,5 @@ CONF_PRESETS = {
)
}
CONF_SENSOR = "target_sensor"
CONF_KEEP_ALIVE = "keep_alive"
DEFAULT_TOLERANCE = 0.3

View File

@@ -18,6 +18,7 @@
"cold_tolerance": "Cold tolerance",
"heater": "Actuator switch",
"hot_tolerance": "Hot tolerance",
"keep_alive": "Keep-alive interval",
"max_temp": "Maximum target temperature",
"min_cycle_duration": "Minimum cycle duration",
"min_temp": "Minimum target temperature",
@@ -29,6 +30,7 @@
"cold_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched on. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will start when the sensor goes below 24.5.",
"heater": "Switch entity used to cool or heat depending on A/C mode.",
"hot_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched off. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will stop when the sensor equals or goes above 25.5.",
"keep_alive": "Trigger the heater periodically to keep devices from losing state. When set, min cycle duration is ignored.",
"min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on.",
"target_sensor": "Temperature sensor that reflects the current temperature."
},
@@ -45,6 +47,7 @@
"cold_tolerance": "[%key:component::generic_thermostat::config::step::user::data::cold_tolerance%]",
"heater": "[%key:component::generic_thermostat::config::step::user::data::heater%]",
"hot_tolerance": "[%key:component::generic_thermostat::config::step::user::data::hot_tolerance%]",
"keep_alive": "[%key:component::generic_thermostat::config::step::user::data::keep_alive%]",
"max_temp": "[%key:component::generic_thermostat::config::step::user::data::max_temp%]",
"min_cycle_duration": "[%key:component::generic_thermostat::config::step::user::data::min_cycle_duration%]",
"min_temp": "[%key:component::generic_thermostat::config::step::user::data::min_temp%]",
@@ -55,6 +58,7 @@
"cold_tolerance": "[%key:component::generic_thermostat::config::step::user::data_description::cold_tolerance%]",
"heater": "[%key:component::generic_thermostat::config::step::user::data_description::heater%]",
"hot_tolerance": "[%key:component::generic_thermostat::config::step::user::data_description::hot_tolerance%]",
"keep_alive": "[%key:component::generic_thermostat::config::step::user::data_description::keep_alive%]",
"min_cycle_duration": "[%key:component::generic_thermostat::config::step::user::data_description::min_cycle_duration%]",
"target_sensor": "[%key:component::generic_thermostat::config::step::user::data_description::target_sensor%]"
}

View File

@@ -112,6 +112,7 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.CO,
native_unit_of_measurement_fn=lambda x: x.pollutants.co.concentration.units,
exists_fn=lambda x: "co" in {p.code for p in x.pollutants},
value_fn=lambda x: x.pollutants.co.concentration.value,
),
AirQualitySensorEntityDescription(
@@ -143,6 +144,7 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
translation_key="nitrogen_dioxide",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement_fn=lambda x: x.pollutants.no2.concentration.units,
exists_fn=lambda x: "no2" in {p.code for p in x.pollutants},
value_fn=lambda x: x.pollutants.no2.concentration.value,
),
AirQualitySensorEntityDescription(
@@ -150,6 +152,7 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
translation_key="ozone",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement_fn=lambda x: x.pollutants.o3.concentration.units,
exists_fn=lambda x: "o3" in {p.code for p in x.pollutants},
value_fn=lambda x: x.pollutants.o3.concentration.value,
),
AirQualitySensorEntityDescription(
@@ -157,6 +160,7 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.PM10,
native_unit_of_measurement_fn=lambda x: x.pollutants.pm10.concentration.units,
exists_fn=lambda x: "pm10" in {p.code for p in x.pollutants},
value_fn=lambda x: x.pollutants.pm10.concentration.value,
),
AirQualitySensorEntityDescription(
@@ -164,6 +168,7 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
state_class=SensorStateClass.MEASUREMENT,
device_class=SensorDeviceClass.PM25,
native_unit_of_measurement_fn=lambda x: x.pollutants.pm25.concentration.units,
exists_fn=lambda x: "pm25" in {p.code for p in x.pollutants},
value_fn=lambda x: x.pollutants.pm25.concentration.value,
),
AirQualitySensorEntityDescription(
@@ -171,6 +176,7 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
translation_key="sulphur_dioxide",
state_class=SensorStateClass.MEASUREMENT,
native_unit_of_measurement_fn=lambda x: x.pollutants.so2.concentration.units,
exists_fn=lambda x: "so2" in {p.code for p in x.pollutants},
value_fn=lambda x: x.pollutants.so2.concentration.value,
),
)
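The new exists_fn fields gate entity creation on whether the API actually reports a given pollutant for the location. A self-contained toy showing how a check shaped like the lambdas above evaluates against a response object (the Pollutant/Response classes here are stand-ins, not the integration's real data model):

class Pollutant:
    def __init__(self, code: str) -> None:
        self.code = code


class Response:
    def __init__(self, codes: list[str]) -> None:
        self.pollutants = [Pollutant(c) for c in codes]


def exists_no2(x: Response) -> bool:
    # Same shape as the lambdas above: is the pollutant code present in the response?
    return "no2" in {p.code for p in x.pollutants}


print(exists_no2(Response(["co", "pm25"])))  # False -> no NO2 sensor is created
print(exists_no2(Response(["no2", "o3"])))   # True  -> the NO2 sensor is created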

View File

@@ -346,7 +346,6 @@ class SensorGroup(GroupEntity, SensorEntity):
self._attr_name = name
if name == DEFAULT_NAME:
self._attr_name = f"{DEFAULT_NAME} {sensor_type}".capitalize()
self._attr_extra_state_attributes = {ATTR_ENTITY_ID: entity_ids}
self._attr_unique_id = unique_id
self._ignore_non_numeric = ignore_non_numeric
self.mode = all if ignore_non_numeric is False else any
@@ -374,7 +373,7 @@ class SensorGroup(GroupEntity, SensorEntity):
def async_update_group_state(self) -> None:
"""Query all members and determine the sensor group state."""
self.calculate_state_attributes(self._get_valid_entities())
states: list[str] = []
states: list[str | None] = []
valid_units = self._valid_units
valid_states: list[bool] = []
sensor_values: list[tuple[str, float, State]] = []
@@ -435,9 +434,12 @@ class SensorGroup(GroupEntity, SensorEntity):
state.attributes.get("unit_of_measurement"),
self.entity_id,
)
else:
states.append(None)
valid_states.append(False)
# Set group as unavailable if all members do not have numeric values
self._attr_available = any(numeric_state for numeric_state in valid_states)
# Set group as unavailable if all members are unavailable or missing
self._attr_available = not all(s in (STATE_UNAVAILABLE, None) for s in states)
valid_state = self.mode(
state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) for state in states
@@ -446,6 +448,7 @@ class SensorGroup(GroupEntity, SensorEntity):
if not valid_state or not valid_state_numeric:
self._attr_native_value = None
self._extra_state_attribute = {}
return
# Calculate values
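With the change above, the group only goes unavailable when every member is unavailable or missing; a quick check of the new availability expression:

STATE_UNAVAILABLE = "unavailable"  # stand-in for homeassistant.const.STATE_UNAVAILABLE

states = ["unavailable", None, "21.5"]
print(not all(s in (STATE_UNAVAILABLE, None) for s in states))  # True: one usable member keeps the group available

states = ["unavailable", None]
print(not all(s in (STATE_UNAVAILABLE, None) for s in states))  # False: every member unavailable or missing -> group unavailable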

View File

@@ -8,6 +8,7 @@ from .coordinator import HDFuryConfigEntry, HDFuryCoordinator
PLATFORMS = [
Platform.BUTTON,
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
]

View File

@@ -16,6 +16,50 @@
"default": "mdi:hdmi-port"
}
},
"sensor": {
"aud0": {
"default": "mdi:audio-input-rca"
},
"aud1": {
"default": "mdi:audio-input-rca"
},
"audout": {
"default": "mdi:television-speaker"
},
"earcrx": {
"default": "mdi:audio-video"
},
"edida0": {
"default": "mdi:format-list-text"
},
"edida1": {
"default": "mdi:format-list-text"
},
"edida2": {
"default": "mdi:format-list-text"
},
"rx0": {
"default": "mdi:video-input-hdmi"
},
"rx1": {
"default": "mdi:video-input-hdmi"
},
"sink0": {
"default": "mdi:television"
},
"sink1": {
"default": "mdi:television"
},
"sink2": {
"default": "mdi:audio-video"
},
"tx0": {
"default": "mdi:cable-data"
},
"tx1": {
"default": "mdi:cable-data"
}
},
"switch": {
"autosw": {
"default": "mdi:import"

View File

@@ -0,0 +1,121 @@
"""Sensor platform for HDFury Integration."""
from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import HDFuryConfigEntry
from .entity import HDFuryEntity
SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="RX0",
translation_key="rx0",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="RX1",
translation_key="rx1",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="TX0",
translation_key="tx0",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="TX1",
translation_key="tx1",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="AUD0",
translation_key="aud0",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="AUD1",
translation_key="aud1",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="AUDOUT",
translation_key="audout",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="EARCRX",
translation_key="earcrx",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="SINK0",
translation_key="sink0",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="SINK1",
translation_key="sink1",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="SINK2",
translation_key="sink2",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="EDIDA0",
translation_key="edida0",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="EDIDA1",
translation_key="edida1",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="EDIDA2",
translation_key="edida2",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: HDFuryConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up sensors using the platform schema."""
coordinator = entry.runtime_data
async_add_entities(
HDFurySensor(coordinator, description)
for description in SENSORS
if description.key in coordinator.data.info
)
class HDFurySensor(HDFuryEntity, SensorEntity):
"""Base HDFury Sensor Class."""
entity_description: SensorEntityDescription
@property
def native_value(self) -> str:
"""Set Sensor Value."""
return self.coordinator.data.info[self.entity_description.key]

View File

@@ -57,6 +57,50 @@
}
}
},
"sensor": {
"aud0": {
"name": "Audio TX0"
},
"aud1": {
"name": "Audio TX1"
},
"audout": {
"name": "Audio output"
},
"earcrx": {
"name": "eARC/ARC status"
},
"edida0": {
"name": "EDID TXA0"
},
"edida1": {
"name": "EDID TXA1"
},
"edida2": {
"name": "EDID AUDA"
},
"rx0": {
"name": "Input RX0"
},
"rx1": {
"name": "Input RX1"
},
"sink0": {
"name": "EDID TX0"
},
"sink1": {
"name": "EDID TX1"
},
"sink2": {
"name": "EDID AUD"
},
"tx0": {
"name": "Output TX0"
},
"tx1": {
"name": "Output TX1"
}
},
"switch": {
"autosw": {
"name": "Auto switch inputs"

View File

@@ -20,10 +20,13 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
PLATFORMS = [Platform.BINARY_SENSOR]
PLATFORMS = [Platform.BINARY_SENSOR, Platform.CAMERA]
@dataclass
@@ -104,6 +107,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: HikvisionConfigEntry) ->
# Start the event stream
await hass.async_add_executor_job(camera.start_stream)
# Register the main device before platforms that use via_device
device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, device_id)},
name=device_name,
manufacturer="Hikvision",
model=device_type,
)
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True

View File

@@ -185,19 +185,26 @@ class HikvisionBinarySensor(BinarySensorEntity):
# Build unique ID
self._attr_unique_id = f"{self._data.device_id}_{sensor_type}_{channel}"
# Build entity name based on device type
if self._data.device_type == "NVR":
self._attr_name = f"{sensor_type} {channel}"
else:
self._attr_name = sensor_type
# Device info for device registry
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._data.device_id)},
name=self._data.device_name,
manufacturer="Hikvision",
model=self._data.device_type,
)
if self._data.device_type == "NVR":
# NVR channels get their own device linked to the NVR via via_device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{self._data.device_id}_{channel}")},
via_device=(DOMAIN, self._data.device_id),
name=f"{self._data.device_name} Channel {channel}",
manufacturer="Hikvision",
model="NVR Channel",
)
self._attr_name = sensor_type
else:
# Single camera device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._data.device_id)},
name=self._data.device_name,
manufacturer="Hikvision",
model=self._data.device_type,
)
self._attr_name = sensor_type
# Set device class
self._attr_device_class = DEVICE_CLASS_MAP.get(sensor_type)

View File

@@ -0,0 +1,93 @@
"""Support for Hikvision cameras."""
from __future__ import annotations
from homeassistant.components.camera import Camera, CameraEntityFeature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HikvisionConfigEntry
from .const import DOMAIN
PARALLEL_UPDATES = 0
async def async_setup_entry(
hass: HomeAssistant,
entry: HikvisionConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Hikvision cameras from a config entry."""
data = entry.runtime_data
camera = data.camera
# Get available channels from the library
channels = await hass.async_add_executor_job(camera.get_channels)
if channels:
entities = [HikvisionCamera(entry, channel) for channel in channels]
else:
# Fallback to single camera if no channels detected
entities = [HikvisionCamera(entry, 1)]
async_add_entities(entities)
class HikvisionCamera(Camera):
"""Representation of a Hikvision camera."""
_attr_has_entity_name = True
_attr_name = None
_attr_supported_features = CameraEntityFeature.STREAM
def __init__(
self,
entry: HikvisionConfigEntry,
channel: int,
) -> None:
"""Initialize the camera."""
super().__init__()
self._data = entry.runtime_data
self._channel = channel
self._camera = self._data.camera
# Build unique ID (unique per platform per integration)
self._attr_unique_id = f"{self._data.device_id}_{channel}"
# Device info for device registry
if self._data.device_type == "NVR":
# NVR channels get their own device linked to the NVR via via_device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{self._data.device_id}_{channel}")},
via_device=(DOMAIN, self._data.device_id),
name=f"{self._data.device_name} Channel {channel}",
manufacturer="Hikvision",
model="NVR Channel",
)
else:
# Single camera device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._data.device_id)},
name=self._data.device_name,
manufacturer="Hikvision",
model=self._data.device_type,
)
async def async_camera_image(
self, width: int | None = None, height: int | None = None
) -> bytes | None:
"""Return a still image from the camera."""
try:
return await self.hass.async_add_executor_job(
self._camera.get_snapshot, self._channel
)
except Exception as err:
raise HomeAssistantError(
f"Error getting image from {self._data.device_name} channel {self._channel}: {err}"
) from err
async def stream_source(self) -> str | None:
"""Return the stream source URL."""
return self._camera.get_stream_url(self._channel)

View File

@@ -220,31 +220,33 @@ def get_accessory( # noqa: C901
a_type = "TemperatureSensor"
elif device_class == SensorDeviceClass.HUMIDITY and unit == PERCENTAGE:
a_type = "HumiditySensor"
elif (
device_class == SensorDeviceClass.PM10
or SensorDeviceClass.PM10 in state.entity_id
):
elif device_class == SensorDeviceClass.PM10:
a_type = "PM10Sensor"
elif (
device_class == SensorDeviceClass.PM25
or SensorDeviceClass.PM25 in state.entity_id
):
elif device_class == SensorDeviceClass.PM25:
a_type = "PM25Sensor"
elif device_class == SensorDeviceClass.NITROGEN_DIOXIDE:
a_type = "NitrogenDioxideSensor"
elif device_class == SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS:
a_type = "VolatileOrganicCompoundsSensor"
elif (
device_class == SensorDeviceClass.GAS
or SensorDeviceClass.GAS in state.entity_id
):
elif device_class == SensorDeviceClass.GAS:
a_type = "AirQualitySensor"
elif device_class == SensorDeviceClass.CO:
a_type = "CarbonMonoxideSensor"
elif device_class == SensorDeviceClass.CO2 or "co2" in state.entity_id:
elif device_class == SensorDeviceClass.CO2:
a_type = "CarbonDioxideSensor"
elif device_class == SensorDeviceClass.ILLUMINANCE or unit == LIGHT_LUX:
a_type = "LightSensor"
# Fallbacks based on entity_id
elif SensorDeviceClass.PM10 in state.entity_id:
a_type = "PM10Sensor"
elif SensorDeviceClass.PM25 in state.entity_id:
a_type = "PM25Sensor"
elif SensorDeviceClass.GAS in state.entity_id:
a_type = "AirQualitySensor"
elif "co2" in state.entity_id:
a_type = "CarbonDioxideSensor"
else:
_LOGGER.debug(
"%s: Unsupported sensor type (device_class=%s) (unit=%s)",

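The reordering means an explicit device_class always wins; the entity_id substrings ("pm10", "pm25", "gas", "co2") now only act as fallbacks when no device class matched. A simplified, self-contained sketch of that precedence (not the homekit code itself, device class values written out as their string forms):

def pick_accessory_type(device_class: str | None, entity_id: str) -> str | None:
    # Explicit device classes are checked first...
    if device_class == "carbon_monoxide":
        return "CarbonMonoxideSensor"
    if device_class == "gas":
        return "AirQualitySensor"
    if device_class == "carbon_dioxide":
        return "CarbonDioxideSensor"
    # ...and entity_id hints only apply when no device class matched
    if "gas" in entity_id:
        return "AirQualitySensor"
    if "co2" in entity_id:
        return "CarbonDioxideSensor"
    return None


# A CO sensor whose entity_id happens to contain "gas" is no longer mapped to AirQualitySensor
print(pick_accessory_type("carbon_monoxide", "sensor.boiler_gas_alarm"))  # CarbonMonoxideSensor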
View File

@@ -27,7 +27,7 @@ from .const import (
SUPPORTED_PLATFORMS_UI,
SUPPORTED_PLATFORMS_YAML,
)
from .expose import create_knx_exposure
from .expose import create_combined_knx_exposure
from .knx_module import KNXModule
from .project import STORAGE_KEY as PROJECT_STORAGE_KEY
from .schema import (
@@ -121,10 +121,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.data[KNX_MODULE_KEY] = knx_module
if CONF_KNX_EXPOSE in config:
for expose_config in config[CONF_KNX_EXPOSE]:
knx_module.exposures.append(
create_knx_exposure(hass, knx_module.xknx, expose_config)
)
knx_module.yaml_exposures.extend(
create_combined_knx_exposure(hass, knx_module.xknx, config[CONF_KNX_EXPOSE])
)
configured_platforms_yaml = {
platform for platform in SUPPORTED_PLATFORMS_YAML if platform in config
}
@@ -149,7 +149,9 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# if not loaded directly return
return True
for exposure in knx_module.exposures:
for exposure in knx_module.yaml_exposures:
exposure.async_remove()
for exposure in knx_module.service_exposures.values():
exposure.async_remove()
configured_platforms_yaml = {

View File

@@ -2,14 +2,22 @@
from __future__ import annotations
from collections.abc import Callable
from asyncio import TaskGroup
from collections.abc import Callable, Iterable
from dataclasses import dataclass
import logging
from typing import Any
from xknx import XKNX
from xknx.devices import DateDevice, DateTimeDevice, ExposeSensor, TimeDevice
from xknx.dpt import DPTNumeric, DPTString
from xknx.dpt import DPTBase, DPTNumeric, DPTString
from xknx.dpt.dpt_1 import DPT1BitEnum, DPTSwitch
from xknx.exceptions import ConversionError
from xknx.remote_value import RemoteValueSensor
from xknx.telegram.address import (
GroupAddress,
InternalGroupAddress,
parse_device_group_address,
)
from homeassistant.const import (
CONF_ENTITY_ID,
@@ -41,79 +49,159 @@ _LOGGER = logging.getLogger(__name__)
@callback
def create_knx_exposure(
hass: HomeAssistant, xknx: XKNX, config: ConfigType
) -> KNXExposeSensor | KNXExposeTime:
"""Create exposures from config."""
) -> KnxExposeEntity | KnxExposeTime:
"""Create single exposure."""
expose_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
exposure: KNXExposeSensor | KNXExposeTime
exposure: KnxExposeEntity | KnxExposeTime
if (
isinstance(expose_type, str)
and expose_type.lower() in ExposeSchema.EXPOSE_TIME_TYPES
):
exposure = KNXExposeTime(
exposure = KnxExposeTime(
xknx=xknx,
config=config,
)
else:
exposure = KNXExposeSensor(
hass,
exposure = KnxExposeEntity(
hass=hass,
xknx=xknx,
config=config,
entity_id=config[CONF_ENTITY_ID],
options=(_yaml_config_to_expose_options(config),),
)
exposure.async_register()
return exposure
class KNXExposeSensor:
"""Object to Expose Home Assistant entity to KNX bus."""
@callback
def create_combined_knx_exposure(
hass: HomeAssistant, xknx: XKNX, configs: list[ConfigType]
) -> list[KnxExposeEntity | KnxExposeTime]:
"""Create exposures from YAML config combined by entity_id."""
exposures: list[KnxExposeEntity | KnxExposeTime] = []
entity_exposure_map: dict[str, list[KnxExposeOptions]] = {}
for config in configs:
value_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
if value_type.lower() in ExposeSchema.EXPOSE_TIME_TYPES:
time_exposure = KnxExposeTime(
xknx=xknx,
config=config,
)
time_exposure.async_register()
exposures.append(time_exposure)
continue
entity_id = config[CONF_ENTITY_ID]
option = _yaml_config_to_expose_options(config)
entity_exposure_map.setdefault(entity_id, []).append(option)
for entity_id, options in entity_exposure_map.items():
entity_exposure = KnxExposeEntity(
hass=hass,
xknx=xknx,
entity_id=entity_id,
options=options,
)
entity_exposure.async_register()
exposures.append(entity_exposure)
return exposures
@dataclass(slots=True)
class KnxExposeOptions:
"""Options for KNX Expose."""
attribute: str | None
group_address: GroupAddress | InternalGroupAddress
dpt: type[DPTBase]
respond_to_read: bool
cooldown: float
default: Any | None
value_template: Template | None
def _yaml_config_to_expose_options(config: ConfigType) -> KnxExposeOptions:
"""Convert single yaml expose config to KnxExposeOptions."""
value_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
dpt: type[DPTBase]
if value_type == "binary":
# HA yaml expose flag for DPT-1 (no explicit DPT 1 definitions in xknx back then)
dpt = DPTSwitch
else:
dpt = DPTBase.parse_transcoder(config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]) # type: ignore[assignment] # checked by schema validation
ga = parse_device_group_address(config[KNX_ADDRESS])
return KnxExposeOptions(
attribute=config.get(ExposeSchema.CONF_KNX_EXPOSE_ATTRIBUTE),
group_address=ga,
dpt=dpt,
respond_to_read=config[CONF_RESPOND_TO_READ],
cooldown=config[ExposeSchema.CONF_KNX_EXPOSE_COOLDOWN],
default=config.get(ExposeSchema.CONF_KNX_EXPOSE_DEFAULT),
value_template=config.get(CONF_VALUE_TEMPLATE),
)
class KnxExposeEntity:
"""Expose Home Assistant entity values to KNX bus."""
def __init__(
self,
hass: HomeAssistant,
xknx: XKNX,
config: ConfigType,
entity_id: str,
options: Iterable[KnxExposeOptions],
) -> None:
"""Initialize of Expose class."""
"""Initialize KnxExposeEntity class."""
self.hass = hass
self.xknx = xknx
self.entity_id: str = config[CONF_ENTITY_ID]
self.expose_attribute: str | None = config.get(
ExposeSchema.CONF_KNX_EXPOSE_ATTRIBUTE
)
self.expose_default = config.get(ExposeSchema.CONF_KNX_EXPOSE_DEFAULT)
self.expose_type: int | str = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
self.value_template: Template | None = config.get(CONF_VALUE_TEMPLATE)
self.entity_id = entity_id
self._remove_listener: Callable[[], None] | None = None
self.device: ExposeSensor = ExposeSensor(
xknx=self.xknx,
name=f"{self.entity_id}__{self.expose_attribute or 'state'}",
group_address=config[KNX_ADDRESS],
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=self.expose_type,
cooldown=config[ExposeSchema.CONF_KNX_EXPOSE_COOLDOWN],
self._exposures = tuple(
(
option,
ExposeSensor(
xknx=self.xknx,
name=f"{self.entity_id} {option.attribute or 'state'}",
group_address=option.group_address,
respond_to_read=option.respond_to_read,
value_type=option.dpt,
cooldown=option.cooldown,
),
)
for option in options
)
@property
def name(self) -> str:
"""Return name of the expose entity."""
expose_names = [opt.attribute or "state" for opt, _ in self._exposures]
return f"{self.entity_id}__{'__'.join(expose_names)}"
@callback
def async_register(self) -> None:
"""Register listener."""
"""Register listener and XKNX devices."""
self._remove_listener = async_track_state_change_event(
self.hass, [self.entity_id], self._async_entity_changed
)
self.xknx.devices.async_add(self.device)
for _option, xknx_expose in self._exposures:
self.xknx.devices.async_add(xknx_expose)
self._init_expose_state()
@callback
def _init_expose_state(self) -> None:
"""Initialize state of the exposure."""
"""Initialize state of all exposures."""
init_state = self.hass.states.get(self.entity_id)
state_value = self._get_expose_value(init_state)
try:
self.device.sensor_value.value = state_value
except ConversionError:
_LOGGER.exception("Error during sending of expose sensor value")
for option, xknx_expose in self._exposures:
state_value = self._get_expose_value(init_state, option)
try:
xknx_expose.sensor_value.value = state_value
except ConversionError:
_LOGGER.exception(
"Error setting value %s for expose sensor %s",
state_value,
xknx_expose.name,
)
@callback
def async_remove(self) -> None:
@@ -121,53 +209,57 @@ class KNXExposeSensor:
if self._remove_listener is not None:
self._remove_listener()
self._remove_listener = None
self.xknx.devices.async_remove(self.device)
for _option, xknx_expose in self._exposures:
self.xknx.devices.async_remove(xknx_expose)
def _get_expose_value(self, state: State | None) -> bool | int | float | str | None:
"""Extract value from state."""
def _get_expose_value(
self, state: State | None, option: KnxExposeOptions
) -> bool | int | float | str | None:
"""Extract value from state for a specific option."""
if state is None or state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
if self.expose_default is None:
if option.default is None:
return None
value = self.expose_default
elif self.expose_attribute is not None:
_attr = state.attributes.get(self.expose_attribute)
value = _attr if _attr is not None else self.expose_default
value = option.default
elif option.attribute is not None:
_attr = state.attributes.get(option.attribute)
value = _attr if _attr is not None else option.default
else:
value = state.state
if self.value_template is not None:
if option.value_template is not None:
try:
value = self.value_template.async_render_with_possible_json_value(
value = option.value_template.async_render_with_possible_json_value(
value, error_value=None
)
except (TemplateError, TypeError, ValueError) as err:
_LOGGER.warning(
"Error rendering value template for KNX expose %s %s: %s",
self.device.name,
self.value_template.template,
"Error rendering value template for KNX expose %s %s %s: %s",
self.entity_id,
option.attribute or "state",
option.value_template.template,
err,
)
return None
if self.expose_type == "binary":
if issubclass(option.dpt, DPT1BitEnum):
if value in (1, STATE_ON, "True"):
return True
if value in (0, STATE_OFF, "False"):
return False
if value is not None and (
isinstance(self.device.sensor_value, RemoteValueSensor)
):
# Handle numeric and string DPT conversions
if value is not None:
try:
if issubclass(self.device.sensor_value.dpt_class, DPTNumeric):
if issubclass(option.dpt, DPTNumeric):
return float(value)
if issubclass(self.device.sensor_value.dpt_class, DPTString):
if issubclass(option.dpt, DPTString):
# DPT 16.000 only allows up to 14 Bytes
return str(value)[:14]
except (ValueError, TypeError) as err:
_LOGGER.warning(
'Could not expose %s %s value "%s" to KNX: Conversion failed: %s',
self.entity_id,
self.expose_attribute or "state",
option.attribute or "state",
value,
err,
)
@@ -175,32 +267,31 @@ class KNXExposeSensor:
return value # type: ignore[no-any-return]
async def _async_entity_changed(self, event: Event[EventStateChangedData]) -> None:
"""Handle entity change."""
"""Handle entity change for all options."""
new_state = event.data["new_state"]
if (new_value := self._get_expose_value(new_state)) is None:
return
old_state = event.data["old_state"]
# don't use default value for comparison on first state change (old_state is None)
old_value = self._get_expose_value(old_state) if old_state is not None else None
# don't send same value sequentially
if new_value != old_value:
await self._async_set_knx_value(new_value)
async with TaskGroup() as tg:
for option, xknx_expose in self._exposures:
expose_value = self._get_expose_value(new_state, option)
if expose_value is None:
continue
tg.create_task(self._async_set_knx_value(xknx_expose, expose_value))
async def _async_set_knx_value(self, value: StateType) -> None:
async def _async_set_knx_value(
self, xknx_expose: ExposeSensor, value: StateType
) -> None:
"""Set new value on xknx ExposeSensor."""
try:
await self.device.set(value)
await xknx_expose.set(value, skip_unchanged=True)
except ConversionError as err:
_LOGGER.warning(
'Could not expose %s %s value "%s" to KNX: %s',
self.entity_id,
self.expose_attribute or "state",
'Could not expose %s value "%s" to KNX: %s',
xknx_expose.name,
value,
err,
)
class KNXExposeTime:
class KnxExposeTime:
"""Object to Expose Time/Date object to KNX bus."""
def __init__(self, xknx: XKNX, config: ConfigType) -> None:
@@ -222,6 +313,11 @@ class KNXExposeTime:
group_address=config[KNX_ADDRESS],
)
@property
def name(self) -> str:
"""Return name of the time expose object."""
return f"expose_{self.device.name}"
@callback
def async_register(self) -> None:
"""Register listener."""


@@ -54,7 +54,7 @@ from .const import (
TELEGRAM_LOG_DEFAULT,
)
from .device import KNXInterfaceDevice
from .expose import KNXExposeSensor, KNXExposeTime
from .expose import KnxExposeEntity, KnxExposeTime
from .project import KNXProject
from .repairs import data_secure_group_key_issue_dispatcher
from .storage.config_store import KNXConfigStore
@@ -73,8 +73,8 @@ class KNXModule:
self.hass = hass
self.config_yaml = config
self.connected = False
self.exposures: list[KNXExposeSensor | KNXExposeTime] = []
self.service_exposures: dict[str, KNXExposeSensor | KNXExposeTime] = {}
self.yaml_exposures: list[KnxExposeEntity | KnxExposeTime] = []
self.service_exposures: dict[str, KnxExposeEntity | KnxExposeTime] = {}
self.entry = entry
self.project = KNXProject(hass=hass, entry=entry)


@@ -11,7 +11,7 @@
"loggers": ["xknx", "xknxproject"],
"quality_scale": "platinum",
"requirements": [
"xknx==3.13.0",
"xknx==3.14.0",
"xknxproject==3.8.2",
"knx-frontend==2025.12.30.151231"
],


@@ -193,7 +193,7 @@ async def service_exposure_register_modify(call: ServiceCall) -> None:
" for '%s' - %s"
),
group_address,
replaced_exposure.device.name,
replaced_exposure.name,
)
replaced_exposure.async_remove()
exposure = create_knx_exposure(knx_module.hass, knx_module.xknx, call.data)
@@ -201,7 +201,7 @@ async def service_exposure_register_modify(call: ServiceCall) -> None:
_LOGGER.debug(
"Service exposure_register registered exposure for '%s' - %s",
group_address,
exposure.device.name,
exposure.name,
)


@@ -42,7 +42,7 @@
},
"conditions": {
"is_off": {
"description": "Test if a light is off.",
"description": "Tests if one or more lights are off.",
"fields": {
"behavior": {
"description": "[%key:component::light::common::condition_behavior_description%]",
@@ -52,7 +52,7 @@
"name": "If a light is off"
},
"is_on": {
"description": "Test if a light is on.",
"description": "Tests if one or more lights are on.",
"fields": {
"behavior": {
"description": "[%key:component::light::common::condition_behavior_description%]",


@@ -7,7 +7,6 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["london_tube_status"],
"quality_scale": "legacy",
"requirements": ["london-tube-status==0.5"],
"single_config_entry": true
}


@@ -528,7 +528,10 @@ DISCOVERY_SCHEMAS = [
),
),
entity_class=MatterBinarySensor,
required_attributes=(clusters.Thermostat.Attributes.RemoteSensing,),
required_attributes=(
clusters.Thermostat.Attributes.RemoteSensing,
clusters.Thermostat.Attributes.OutdoorTemperature,
),
allow_multi=True,
),
MatterDiscoverySchema(


@@ -642,6 +642,7 @@ DISCOVERY_SCHEMAS = [
list_attribute=clusters.DoorLock.Attributes.SupportedOperatingModes,
device_to_ha=DOOR_LOCK_OPERATING_MODE_MAP.get,
ha_to_device=DOOR_LOCK_OPERATING_MODE_MAP_REVERSE.get,
entity_category=EntityCategory.CONFIG,
),
entity_class=MatterDoorLockOperatingModeSelectEntity,
required_attributes=(


@@ -4,45 +4,70 @@ from __future__ import annotations
import asyncio
from datetime import timedelta
import logging
from typing import Any
from http import HTTPStatus
from aiohttp import ClientConnectionError, ClientResponseError
from pymelcloud import Device, get_devices
from pymelcloud.atw_device import Zone
from pymelcloud import get_devices
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.util import Throttle
from homeassistant.helpers.update_coordinator import UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15)
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]
type MelCloudConfigEntry = ConfigEntry[dict[str, list[MelCloudDevice]]]
async def async_setup_entry(hass: HomeAssistant, entry: MelCloudConfigEntry) -> bool:
"""Establish connection with MELCloud."""
conf = entry.data
try:
mel_devices = await mel_devices_setup(hass, conf[CONF_TOKEN])
async with asyncio.timeout(10):
all_devices = await get_devices(
token=entry.data[CONF_TOKEN],
session=async_get_clientsession(hass),
conf_update_interval=timedelta(minutes=30),
device_set_debounce=timedelta(seconds=2),
)
except ClientResponseError as ex:
if isinstance(ex, ClientResponseError) and ex.code == 401:
if ex.status in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN):
raise ConfigEntryAuthFailed from ex
raise ConfigEntryNotReady from ex
if ex.status == HTTPStatus.TOO_MANY_REQUESTS:
raise UpdateFailed(
"MELCloud rate limit exceeded. Your account may be temporarily blocked"
) from ex
raise UpdateFailed(f"Error communicating with MELCloud: {ex}") from ex
except (TimeoutError, ClientConnectionError) as ex:
raise ConfigEntryNotReady from ex
raise UpdateFailed(f"Error communicating with MELCloud: {ex}") from ex
entry.runtime_data = mel_devices
# Create per-device coordinators
coordinators: dict[str, list[MelCloudDeviceUpdateCoordinator]] = {}
device_registry = dr.async_get(hass)
for device_type, devices in all_devices.items():
# Build coordinators for this device_type
coordinators[device_type] = [
MelCloudDeviceUpdateCoordinator(hass, device, entry) for device in devices
]
# Perform initial refreshes concurrently
await asyncio.gather(
*(
coordinator.async_config_entry_first_refresh()
for coordinator in coordinators[device_type]
)
)
# Register parent devices so zone entities can reference via_device
for coordinator in coordinators[device_type]:
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
**coordinator.device_info,
)
entry.runtime_data = coordinators
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True
@@ -50,90 +75,3 @@ async def async_setup_entry(hass: HomeAssistant, entry: MelCloudConfigEntry) ->
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)
class MelCloudDevice:
"""MELCloud Device instance."""
def __init__(self, device: Device) -> None:
"""Construct a device wrapper."""
self.device = device
self.name = device.name
self._available = True
@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self, **kwargs):
"""Pull the latest data from MELCloud."""
try:
await self.device.update()
self._available = True
except ClientConnectionError:
_LOGGER.warning("Connection failed for %s", self.name)
self._available = False
async def async_set(self, properties: dict[str, Any]):
"""Write state changes to the MELCloud API."""
try:
await self.device.set(properties)
self._available = True
except ClientConnectionError:
_LOGGER.warning("Connection failed for %s", self.name)
self._available = False
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available
@property
def device_id(self):
"""Return device ID."""
return self.device.device_id
@property
def building_id(self):
"""Return building ID of the device."""
return self.device.building_id
@property
def device_info(self) -> DeviceInfo:
"""Return a device description for device registry."""
model = None
if (unit_infos := self.device.units) is not None:
model = ", ".join([x["model"] for x in unit_infos if x["model"]])
return DeviceInfo(
connections={(CONNECTION_NETWORK_MAC, self.device.mac)},
identifiers={(DOMAIN, f"{self.device.mac}-{self.device.serial}")},
manufacturer="Mitsubishi Electric",
model=model,
name=self.name,
)
def zone_device_info(self, zone: Zone) -> DeviceInfo:
"""Return a zone device description for device registry."""
dev = self.device
return DeviceInfo(
identifiers={(DOMAIN, f"{dev.mac}-{dev.serial}-{zone.zone_index}")},
manufacturer="Mitsubishi Electric",
model="ATW zone device",
name=f"{self.name} {zone.name}",
via_device=(DOMAIN, f"{dev.mac}-{dev.serial}"),
)
async def mel_devices_setup(
hass: HomeAssistant, token: str
) -> dict[str, list[MelCloudDevice]]:
"""Query connected devices from MELCloud."""
session = async_get_clientsession(hass)
async with asyncio.timeout(10):
all_devices = await get_devices(
token,
session,
conf_update_interval=timedelta(minutes=30),
device_set_debounce=timedelta(seconds=2),
)
wrapped_devices: dict[str, list[MelCloudDevice]] = {}
for device_type, devices in all_devices.items():
wrapped_devices[device_type] = [MelCloudDevice(device) for device in devices]
return wrapped_devices
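A condensed sketch of the per-device coordinator setup used above, assuming the MelCloudDeviceUpdateCoordinator from the coordinator module shown further down; the function name refresh_device_type is hypothetical.

import asyncio

from homeassistant.core import HomeAssistant

from .coordinator import MelCloudDeviceUpdateCoordinator  # module added in this change


async def refresh_device_type(
    hass: HomeAssistant, entry, devices
) -> list[MelCloudDeviceUpdateCoordinator]:
    """Build one coordinator per device and run their first refreshes concurrently."""
    coordinators = [
        MelCloudDeviceUpdateCoordinator(hass, device, entry) for device in devices
    ]
    # asyncio.gather lets the initial polls run in parallel instead of serially
    await asyncio.gather(
        *(
            coordinator.async_config_entry_first_refresh()
            for coordinator in coordinators
        )
    )
    return coordinators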


@@ -2,7 +2,6 @@
from __future__ import annotations
from datetime import timedelta
from typing import Any, cast
from pymelcloud import DEVICE_TYPE_ATA, DEVICE_TYPE_ATW, AtaDevice, AtwDevice
@@ -29,7 +28,6 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import MelCloudConfigEntry, MelCloudDevice
from .const import (
ATTR_STATUS,
ATTR_VANE_HORIZONTAL,
@@ -40,9 +38,8 @@ from .const import (
SERVICE_SET_VANE_HORIZONTAL,
SERVICE_SET_VANE_VERTICAL,
)
SCAN_INTERVAL = timedelta(seconds=60)
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
from .entity import MelCloudEntity
ATA_HVAC_MODE_LOOKUP = {
ata.OPERATION_MODE_HEAT: HVACMode.HEAT,
@@ -74,27 +71,24 @@ ATW_ZONE_HVAC_ACTION_LOOKUP = {
async def async_setup_entry(
hass: HomeAssistant,
_hass: HomeAssistant,
entry: MelCloudConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up MelCloud device climate based on config_entry."""
mel_devices = entry.runtime_data
coordinators = entry.runtime_data
entities: list[AtaDeviceClimate | AtwDeviceZoneClimate] = [
AtaDeviceClimate(mel_device, mel_device.device)
for mel_device in mel_devices[DEVICE_TYPE_ATA]
AtaDeviceClimate(coordinator, coordinator.device)
for coordinator in coordinators.get(DEVICE_TYPE_ATA, [])
]
entities.extend(
[
AtwDeviceZoneClimate(mel_device, mel_device.device, zone)
for mel_device in mel_devices[DEVICE_TYPE_ATW]
for zone in mel_device.device.zones
AtwDeviceZoneClimate(coordinator, coordinator.device, zone)
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
for zone in coordinator.device.zones
]
)
async_add_entities(
entities,
True,
)
async_add_entities(entities)
platform = entity_platform.async_get_current_platform()
platform.async_register_entity_service(
@@ -109,21 +103,19 @@ async def async_setup_entry(
)
class MelCloudClimate(ClimateEntity):
class MelCloudClimate(MelCloudEntity, ClimateEntity):
"""Base climate device."""
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_has_entity_name = True
_attr_name = None
def __init__(self, device: MelCloudDevice) -> None:
def __init__(
self,
coordinator: MelCloudDeviceUpdateCoordinator,
) -> None:
"""Initialize the climate."""
self.api = device
self._base_device = self.api.device
async def async_update(self) -> None:
"""Update state from MELCloud."""
await self.api.async_update()
super().__init__(coordinator)
self._base_device = self.coordinator.device
@property
def target_temperature_step(self) -> float | None:
@@ -142,26 +134,29 @@ class AtaDeviceClimate(MelCloudClimate):
| ClimateEntityFeature.TURN_ON
)
def __init__(self, device: MelCloudDevice, ata_device: AtaDevice) -> None:
def __init__(
self,
coordinator: MelCloudDeviceUpdateCoordinator,
ata_device: AtaDevice,
) -> None:
"""Initialize the climate."""
super().__init__(device)
super().__init__(coordinator)
self._device = ata_device
self._attr_unique_id = f"{self.api.device.serial}-{self.api.device.mac}"
self._attr_device_info = self.api.device_info
self._attr_unique_id = (
f"{self.coordinator.device.serial}-{self.coordinator.device.mac}"
)
self._attr_device_info = self.coordinator.device_info
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()
# We can only check for vane_horizontal once we fetch the device data from the cloud
# Add horizontal swing if device supports it
if self._device.vane_horizontal:
self._attr_supported_features |= ClimateEntityFeature.SWING_HORIZONTAL_MODE
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return the optional state attributes with device specific additions."""
attr = {}
attr: dict[str, Any] = {}
attr.update(self.coordinator.extra_attributes)
if vane_horizontal := self._device.vane_horizontal:
attr.update(
@@ -208,7 +203,7 @@ class AtaDeviceClimate(MelCloudClimate):
"""Set new target hvac mode."""
set_dict: dict[str, Any] = {}
self._apply_set_hvac_mode(hvac_mode, set_dict)
await self._device.set(set_dict)
await self.coordinator.async_set(set_dict)
@property
def hvac_modes(self) -> list[HVACMode]:
@@ -241,7 +236,7 @@ class AtaDeviceClimate(MelCloudClimate):
set_dict["target_temperature"] = kwargs.get(ATTR_TEMPERATURE)
if set_dict:
await self._device.set(set_dict)
await self.coordinator.async_set(set_dict)
@property
def fan_mode(self) -> str | None:
@@ -250,7 +245,7 @@ class AtaDeviceClimate(MelCloudClimate):
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
await self._device.set({"fan_speed": fan_mode})
await self.coordinator.async_set({"fan_speed": fan_mode})
@property
def fan_modes(self) -> list[str] | None:
@@ -264,7 +259,7 @@ class AtaDeviceClimate(MelCloudClimate):
f"Invalid horizontal vane position {position}. Valid positions:"
f" [{self._device.vane_horizontal_positions}]."
)
await self._device.set({ata.PROPERTY_VANE_HORIZONTAL: position})
await self.coordinator.async_set({ata.PROPERTY_VANE_HORIZONTAL: position})
async def async_set_vane_vertical(self, position: str) -> None:
"""Set vertical vane position."""
@@ -273,7 +268,7 @@ class AtaDeviceClimate(MelCloudClimate):
f"Invalid vertical vane position {position}. Valid positions:"
f" [{self._device.vane_vertical_positions}]."
)
await self._device.set({ata.PROPERTY_VANE_VERTICAL: position})
await self.coordinator.async_set({ata.PROPERTY_VANE_VERTICAL: position})
@property
def swing_mode(self) -> str | None:
@@ -305,11 +300,11 @@ class AtaDeviceClimate(MelCloudClimate):
async def async_turn_on(self) -> None:
"""Turn the entity on."""
await self._device.set({"power": True})
await self.coordinator.async_set({"power": True})
async def async_turn_off(self) -> None:
"""Turn the entity off."""
await self._device.set({"power": False})
await self.coordinator.async_set({"power": False})
@property
def min_temp(self) -> float:
@@ -338,15 +333,18 @@ class AtwDeviceZoneClimate(MelCloudClimate):
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE
def __init__(
self, device: MelCloudDevice, atw_device: AtwDevice, atw_zone: Zone
self,
coordinator: MelCloudDeviceUpdateCoordinator,
atw_device: AtwDevice,
atw_zone: Zone,
) -> None:
"""Initialize the climate."""
super().__init__(device)
super().__init__(coordinator)
self._device = atw_device
self._zone = atw_zone
self._attr_unique_id = f"{self.api.device.serial}-{atw_zone.zone_index}"
self._attr_device_info = self.api.zone_device_info(atw_zone)
self._attr_unique_id = f"{self.coordinator.device.serial}-{atw_zone.zone_index}"
self._attr_device_info = self.coordinator.zone_device_info(atw_zone)
@property
def extra_state_attributes(self) -> dict[str, Any]:
@@ -360,15 +358,16 @@ class AtwDeviceZoneClimate(MelCloudClimate):
@property
def hvac_mode(self) -> HVACMode:
"""Return hvac operation ie. heat, cool mode."""
mode = self._zone.operation_mode
if not self._device.power or mode is None:
# Use zone status (heat/cool/idle) not operation_mode (heat-thermostat/etc.)
status = self._zone.status
if not self._device.power or status is None:
return HVACMode.OFF
return ATW_ZONE_HVAC_MODE_LOOKUP.get(mode, HVACMode.OFF)
return ATW_ZONE_HVAC_MODE_LOOKUP.get(status, HVACMode.OFF)
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
if hvac_mode == HVACMode.OFF:
await self._device.set({"power": False})
await self.coordinator.async_set({"power": False})
return
operation_mode = ATW_ZONE_HVAC_MODE_REVERSE_LOOKUP.get(hvac_mode)
@@ -381,7 +380,7 @@ class AtwDeviceZoneClimate(MelCloudClimate):
props = {PROPERTY_ZONE_2_OPERATION_MODE: operation_mode}
if self.hvac_mode == HVACMode.OFF:
props["power"] = True
await self._device.set(props)
await self.coordinator.async_set(props)
@property
def hvac_modes(self) -> list[HVACMode]:
@@ -410,3 +409,4 @@ class AtwDeviceZoneClimate(MelCloudClimate):
await self._zone.set_target_temperature(
kwargs.get(ATTR_TEMPERATURE, self.target_temperature)
)
await self.coordinator.async_request_refresh()
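The zone setpoint path above writes through pymelcloud directly and then asks the coordinator for a refresh. A hedged sketch of that write-then-refresh pattern follows; entity stands in for an AtwDeviceZoneClimate instance and the helper name is illustrative.

from homeassistant.const import ATTR_TEMPERATURE


async def set_zone_temperature(entity, **kwargs) -> None:
    """Sketch: push the new setpoint, then let the coordinator refresh shared state."""
    await entity._zone.set_target_temperature(
        kwargs.get(ATTR_TEMPERATURE, entity.target_temperature)
    )
    # The coordinator's request_refresh_debouncer (1.5 s cooldown) coalesces rapid
    # changes into a single MELCloud poll.
    await entity.coordinator.async_request_refresh()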


@@ -5,7 +5,6 @@ from __future__ import annotations
import asyncio
from collections.abc import Mapping
from http import HTTPStatus
import logging
from typing import Any
from aiohttp import ClientError, ClientResponseError
@@ -18,8 +17,6 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
class FlowHandler(ConfigFlow, domain=DOMAIN):
"""Handle a config flow."""
@@ -37,8 +34,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
async def _create_client(
self,
username: str,
*,
password: str | None = None,
password: str,
token: str | None = None,
) -> ConfigFlowResult:
"""Create client."""
@@ -46,13 +42,13 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
async with asyncio.timeout(10):
if (acquired_token := token) is None:
acquired_token = await pymelcloud.login(
username,
password,
async_get_clientsession(self.hass),
email=username,
password=password,
session=async_get_clientsession(self.hass),
)
await pymelcloud.get_devices(
acquired_token,
async_get_clientsession(self.hass),
token=acquired_token,
session=async_get_clientsession(self.hass),
)
except ClientResponseError as err:
if err.status in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN):
@@ -60,6 +56,10 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
return self.async_abort(reason="cannot_connect")
except (TimeoutError, ClientError):
return self.async_abort(reason="cannot_connect")
except AttributeError:
# python-melcloud library bug: login() raises AttributeError on invalid
# credentials when API response doesn't contain expected "LoginData" key
return self.async_abort(reason="invalid_auth")
return await self._create_entry(username, acquired_token)
@@ -74,8 +74,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
{vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
),
)
username = user_input[CONF_USERNAME]
return await self._create_client(username, password=user_input[CONF_PASSWORD])
return await self._create_client(
username=user_input[CONF_USERNAME], password=user_input[CONF_PASSWORD]
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
@@ -114,9 +115,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
try:
async with asyncio.timeout(10):
acquired_token = await pymelcloud.login(
user_input[CONF_USERNAME],
user_input[CONF_PASSWORD],
async_get_clientsession(self.hass),
email=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=async_get_clientsession(self.hass),
)
except (ClientResponseError, AttributeError) as err:
if (
@@ -130,10 +131,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
errors["base"] = "invalid_auth"
else:
errors["base"] = "cannot_connect"
except (
TimeoutError,
ClientError,
):
except (TimeoutError, ClientError):
errors["base"] = "cannot_connect"
return acquired_token, errors
@@ -151,9 +149,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
try:
async with asyncio.timeout(10):
acquired_token = await pymelcloud.login(
user_input[CONF_USERNAME],
user_input[CONF_PASSWORD],
async_get_clientsession(self.hass),
email=user_input[CONF_USERNAME],
password=user_input[CONF_PASSWORD],
session=async_get_clientsession(self.hass),
)
except (ClientResponseError, AttributeError) as err:
if (
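A small sketch of the error classification the flow applies, assuming aiohttp's exception types and the abort/error reasons used above; classify_login_error is a hypothetical helper, not part of the integration.

from http import HTTPStatus

from aiohttp import ClientError, ClientResponseError


def classify_login_error(err: Exception) -> str:
    """Map a login exception to the flow's abort/error reason."""
    if isinstance(err, ClientResponseError):
        if err.status in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN):
            return "invalid_auth"
        return "cannot_connect"
    if isinstance(err, (TimeoutError, ClientError)):
        return "cannot_connect"
    if isinstance(err, AttributeError):
        # python-melcloud raises AttributeError when the login response lacks "LoginData"
        return "invalid_auth"
    return "unknown"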


@@ -0,0 +1,193 @@
"""DataUpdateCoordinator for the MELCloud integration."""
from __future__ import annotations
from datetime import timedelta
import logging
from typing import Any
from aiohttp import ClientConnectionError, ClientResponseError
from pymelcloud import Device
from pymelcloud.atw_device import Zone
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.debounce import Debouncer
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
# Delay before refreshing after a state change to allow device to process
# and avoid race conditions with rapid sequential changes
REQUEST_REFRESH_DELAY = 1.5
# Default update interval in minutes (matches upstream Throttle value)
DEFAULT_UPDATE_INTERVAL = 15
# Retry interval in seconds for transient failures
RETRY_INTERVAL_SECONDS = 30
# Number of consecutive failures before marking device unavailable
MAX_CONSECUTIVE_FAILURES = 3
class MelCloudDeviceUpdateCoordinator(DataUpdateCoordinator[None]):
"""Per-device coordinator for MELCloud data updates."""
def __init__(
self,
hass: HomeAssistant,
device: Device,
config_entry: ConfigEntry,
) -> None:
"""Initialize the per-device coordinator."""
self.device = device
self.device_available = True
self._consecutive_failures = 0
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=f"{DOMAIN}_{device.name}",
update_interval=timedelta(minutes=DEFAULT_UPDATE_INTERVAL),
always_update=True,
request_refresh_debouncer=Debouncer(
hass,
_LOGGER,
cooldown=REQUEST_REFRESH_DELAY,
immediate=False,
),
)
@property
def extra_attributes(self) -> dict[str, Any]:
"""Return extra device attributes."""
data: dict[str, Any] = {
"device_id": self.device.device_id,
"serial": self.device.serial,
"mac": self.device.mac,
}
if (unit_infos := self.device.units) is not None:
for i, unit in enumerate(unit_infos[:2]):
data[f"unit_{i}_model"] = unit.get("model")
data[f"unit_{i}_serial"] = unit.get("serial")
return data
@property
def device_id(self) -> str:
"""Return device ID."""
return self.device.device_id
@property
def building_id(self) -> str:
"""Return building ID of the device."""
return self.device.building_id
@property
def device_info(self) -> DeviceInfo:
"""Return a device description for device registry."""
model = None
if (unit_infos := self.device.units) is not None:
model = ", ".join([x["model"] for x in unit_infos if x["model"]])
return DeviceInfo(
connections={(CONNECTION_NETWORK_MAC, self.device.mac)},
identifiers={(DOMAIN, f"{self.device.mac}-{self.device.serial}")},
manufacturer="Mitsubishi Electric",
model=model,
name=self.device.name,
)
def zone_device_info(self, zone: Zone) -> DeviceInfo:
"""Return a zone device description for device registry."""
dev = self.device
return DeviceInfo(
identifiers={(DOMAIN, f"{dev.mac}-{dev.serial}-{zone.zone_index}")},
manufacturer="Mitsubishi Electric",
model="ATW zone device",
name=f"{self.device.name} {zone.name}",
via_device=(DOMAIN, f"{dev.mac}-{dev.serial}"),
)
async def _async_update_data(self) -> None:
"""Fetch data for this specific device from MELCloud."""
try:
await self.device.update()
# Success - reset failure counter and restore normal interval
if self._consecutive_failures > 0:
_LOGGER.info(
"Connection restored for %s after %d failed attempt(s)",
self.device.name,
self._consecutive_failures,
)
self._consecutive_failures = 0
self.update_interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)
self.device_available = True
except ClientResponseError as ex:
if ex.status in (401, 403):
raise ConfigEntryAuthFailed from ex
if ex.status == 429:
_LOGGER.error(
"MELCloud rate limit exceeded for %s. Your account may be "
"temporarily blocked",
self.device.name,
)
# Rate limit - mark unavailable immediately
self.device_available = False
raise UpdateFailed(
f"Rate limit exceeded for {self.device.name}"
) from ex
# Other HTTP errors - use retry logic
self._handle_failure(f"Error updating {self.device.name}: {ex}", ex)
except ClientConnectionError as ex:
self._handle_failure(f"Connection failed for {self.device.name}: {ex}", ex)
def _handle_failure(self, message: str, exception: Exception | None = None) -> None:
"""Handle a connection failure with retry logic.
For transient failures, entities remain available with their last known
values for up to MAX_CONSECUTIVE_FAILURES attempts. During retries, the
update interval is shortened to RETRY_INTERVAL_SECONDS for faster recovery.
After the threshold is reached, entities are marked unavailable.
"""
self._consecutive_failures += 1
if self._consecutive_failures < MAX_CONSECUTIVE_FAILURES:
# Keep entities available with cached data, use shorter retry interval
_LOGGER.warning(
"%s (attempt %d/%d, retrying in %ds)",
message,
self._consecutive_failures,
MAX_CONSECUTIVE_FAILURES,
RETRY_INTERVAL_SECONDS,
)
self.update_interval = timedelta(seconds=RETRY_INTERVAL_SECONDS)
else:
# Threshold reached - mark unavailable and restore normal interval
_LOGGER.warning(
"%s (attempt %d/%d, marking unavailable)",
message,
self._consecutive_failures,
MAX_CONSECUTIVE_FAILURES,
)
self.device_available = False
self.update_interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)
raise UpdateFailed(message) from exception
async def async_set(self, properties: dict[str, Any]) -> None:
"""Write state changes to the MELCloud API."""
try:
await self.device.set(properties)
self.device_available = True
except ClientConnectionError:
_LOGGER.warning("Connection failed for %s", self.device.name)
self.device_available = False
await self.async_request_refresh()
type MelCloudConfigEntry = ConfigEntry[dict[str, list[MelCloudDeviceUpdateCoordinator]]]
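The failure handling above can be summarised in isolation. The following is a minimal sketch using the same constants; the class name RetryTracker is invented for illustration and is not part of the integration.

from datetime import timedelta

MAX_CONSECUTIVE_FAILURES = 3
RETRY_INTERVAL_SECONDS = 30
DEFAULT_UPDATE_INTERVAL = 15  # minutes


class RetryTracker:
    """Track consecutive failures and derive availability and poll interval."""

    def __init__(self) -> None:
        self.failures = 0
        self.available = True

    def record_success(self) -> timedelta:
        self.failures = 0
        self.available = True
        return timedelta(minutes=DEFAULT_UPDATE_INTERVAL)

    def record_failure(self) -> timedelta:
        self.failures += 1
        if self.failures < MAX_CONSECUTIVE_FAILURES:
            # keep entities available with cached data, retry sooner
            return timedelta(seconds=RETRY_INTERVAL_SECONDS)
        # threshold reached: mark unavailable, fall back to the normal interval
        self.available = False
        return timedelta(minutes=DEFAULT_UPDATE_INTERVAL)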


@@ -9,7 +9,7 @@ from homeassistant.const import CONF_TOKEN, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import MelCloudConfigEntry
from .coordinator import MelCloudConfigEntry
TO_REDACT = {
CONF_USERNAME,


@@ -0,0 +1,18 @@
"""Base entity for MELCloud integration."""
from __future__ import annotations
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .coordinator import MelCloudDeviceUpdateCoordinator
class MelCloudEntity(CoordinatorEntity[MelCloudDeviceUpdateCoordinator]):
"""Base class for MELCloud entities."""
_attr_has_entity_name = True
@property
def available(self) -> bool:
"""Return True if entity is available."""
return super().available and self.coordinator.device_available
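A hedged example of how a platform entity builds on this base class, loosely modelled on the sensor platform further down; the class name and the room_temperature attribute are illustrative assumptions.

from homeassistant.components.sensor import SensorEntity

from .coordinator import MelCloudDeviceUpdateCoordinator
from .entity import MelCloudEntity


class ExampleMelCloudSensor(MelCloudEntity, SensorEntity):
    """Illustrative sensor that goes unavailable together with its device."""

    def __init__(self, coordinator: MelCloudDeviceUpdateCoordinator) -> None:
        super().__init__(coordinator)
        self._attr_unique_id = f"{coordinator.device.serial}-example"
        self._attr_device_info = coordinator.device_info

    @property
    def native_value(self) -> float | None:
        # Reads cached data; the coordinator handles polling and availability
        return getattr(self.coordinator.device, "room_temperature", None)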


@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/melcloud",
"integration_type": "device",
"iot_class": "cloud_polling",
"loggers": ["pymelcloud"],
"loggers": ["melcloud"],
"requirements": ["python-melcloud==0.1.2"]
}


@@ -19,7 +19,8 @@ from homeassistant.const import UnitOfEnergy, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import MelCloudConfigEntry, MelCloudDevice
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
from .entity import MelCloudEntity
@dataclasses.dataclass(frozen=True, kw_only=True)
@@ -111,70 +112,67 @@ ATW_ZONE_SENSORS: tuple[MelcloudSensorEntityDescription, ...] = (
async def async_setup_entry(
hass: HomeAssistant,
_hass: HomeAssistant,
entry: MelCloudConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up MELCloud device sensors based on config_entry."""
mel_devices = entry.runtime_data
coordinators = entry.runtime_data
entities: list[MelDeviceSensor] = [
MelDeviceSensor(mel_device, description)
MelDeviceSensor(coordinator, description)
for description in ATA_SENSORS
for mel_device in mel_devices[DEVICE_TYPE_ATA]
if description.enabled(mel_device)
for coordinator in coordinators.get(DEVICE_TYPE_ATA, [])
if description.enabled(coordinator)
] + [
MelDeviceSensor(mel_device, description)
MelDeviceSensor(coordinator, description)
for description in ATW_SENSORS
for mel_device in mel_devices[DEVICE_TYPE_ATW]
if description.enabled(mel_device)
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
if description.enabled(coordinator)
]
entities.extend(
[
AtwZoneSensor(mel_device, zone, description)
for mel_device in mel_devices[DEVICE_TYPE_ATW]
for zone in mel_device.device.zones
AtwZoneSensor(coordinator, zone, description)
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
for zone in coordinator.device.zones
for description in ATW_ZONE_SENSORS
if description.enabled(zone)
]
)
async_add_entities(entities, True)
async_add_entities(entities)
class MelDeviceSensor(SensorEntity):
class MelDeviceSensor(MelCloudEntity, SensorEntity):
"""Representation of a Sensor."""
entity_description: MelcloudSensorEntityDescription
_attr_has_entity_name = True
def __init__(
self,
api: MelCloudDevice,
coordinator: MelCloudDeviceUpdateCoordinator,
description: MelcloudSensorEntityDescription,
) -> None:
"""Initialize the sensor."""
self._api = api
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{api.device.serial}-{api.device.mac}-{description.key}"
self._attr_device_info = api.device_info
self._attr_unique_id = (
f"{coordinator.device.serial}-{coordinator.device.mac}-{description.key}"
)
self._attr_device_info = coordinator.device_info
@property
def native_value(self) -> float | None:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self._api)
async def async_update(self) -> None:
"""Retrieve latest state."""
await self._api.async_update()
return self.entity_description.value_fn(self.coordinator)
class AtwZoneSensor(MelDeviceSensor):
"""Air-to-Air device sensor."""
"""Air-to-Water zone sensor."""
def __init__(
self,
api: MelCloudDevice,
coordinator: MelCloudDeviceUpdateCoordinator,
zone: Zone,
description: MelcloudSensorEntityDescription,
) -> None:
@@ -184,9 +182,9 @@ class AtwZoneSensor(MelDeviceSensor):
description,
key=f"{description.key}-zone-{zone.zone_index}",
)
super().__init__(api, description)
super().__init__(coordinator, description)
self._attr_device_info = api.zone_device_info(zone)
self._attr_device_info = coordinator.zone_device_info(zone)
self._zone = zone
@property


@@ -43,6 +43,9 @@
},
"entity": {
"sensor": {
"energy_consumed": {
"name": "Energy consumed"
},
"flow_temperature": {
"name": "Flow temperature"
},


@@ -21,27 +21,27 @@ from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import MelCloudConfigEntry, MelCloudDevice
from .const import ATTR_STATUS
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
from .entity import MelCloudEntity
async def async_setup_entry(
hass: HomeAssistant,
_hass: HomeAssistant,
entry: MelCloudConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up MelCloud device climate based on config_entry."""
mel_devices = entry.runtime_data
coordinators = entry.runtime_data
async_add_entities(
[
AtwWaterHeater(mel_device, mel_device.device)
for mel_device in mel_devices[DEVICE_TYPE_ATW]
],
True,
AtwWaterHeater(coordinator, coordinator.device)
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
]
)
class AtwWaterHeater(WaterHeaterEntity):
class AtwWaterHeater(MelCloudEntity, WaterHeaterEntity):
"""Air-to-Water water heater."""
_attr_supported_features = (
@@ -49,27 +49,26 @@ class AtwWaterHeater(WaterHeaterEntity):
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE
)
_attr_has_entity_name = True
_attr_name = None
def __init__(self, api: MelCloudDevice, device: AtwDevice) -> None:
def __init__(
self,
coordinator: MelCloudDeviceUpdateCoordinator,
device: AtwDevice,
) -> None:
"""Initialize water heater device."""
self._api = api
super().__init__(coordinator)
self._device = device
self._attr_unique_id = api.device.serial
self._attr_device_info = api.device_info
self._attr_unique_id = coordinator.device.serial
self._attr_device_info = coordinator.device_info
async def async_update(self) -> None:
"""Update state from MELCloud."""
await self._api.async_update()
async def async_turn_on(self, **kwargs: Any) -> None:
async def async_turn_on(self, **_kwargs: Any) -> None:
"""Turn the entity on."""
await self._device.set({PROPERTY_POWER: True})
await self.coordinator.async_set({PROPERTY_POWER: True})
async def async_turn_off(self, **kwargs: Any) -> None:
async def async_turn_off(self, **_kwargs: Any) -> None:
"""Turn the entity off."""
await self._device.set({PROPERTY_POWER: False})
await self.coordinator.async_set({PROPERTY_POWER: False})
@property
def extra_state_attributes(self) -> dict[str, Any] | None:
@@ -103,7 +102,7 @@ class AtwWaterHeater(WaterHeaterEntity):
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
await self._device.set(
await self.coordinator.async_set(
{
PROPERTY_TARGET_TANK_TEMPERATURE: kwargs.get(
"temperature", self.target_temperature
@@ -113,7 +112,7 @@ class AtwWaterHeater(WaterHeaterEntity):
async def async_set_operation_mode(self, operation_mode: str) -> None:
"""Set new target operation mode."""
await self._device.set({PROPERTY_OPERATION_MODE: operation_mode})
await self.coordinator.async_set({PROPERTY_OPERATION_MODE: operation_mode})
@property
def min_temp(self) -> float:


@@ -7,6 +7,7 @@ from mill_local import OperationMode
import voluptuous as vol
from homeassistant.components.climate import (
ATTR_HVAC_MODE,
ClimateEntity,
ClimateEntityFeature,
HVACAction,
@@ -111,13 +112,16 @@ class MillHeater(MillBaseEntity, ClimateEntity):
super().__init__(coordinator, device)
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
"""Set new target temperature and optionally HVAC mode."""
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
return
await self.coordinator.mill_data_connection.set_heater_temp(
self._id, float(temperature)
)
await self.coordinator.async_request_refresh()
if (hvac_mode := kwargs.get(ATTR_HVAC_MODE)) is not None:
await self.async_handle_set_hvac_mode_service(hvac_mode)
else:
await self.coordinator.async_request_refresh()
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
@@ -125,12 +129,11 @@ class MillHeater(MillBaseEntity, ClimateEntity):
await self.coordinator.mill_data_connection.heater_control(
self._id, power_status=True
)
await self.coordinator.async_request_refresh()
elif hvac_mode == HVACMode.OFF:
await self.coordinator.mill_data_connection.heater_control(
self._id, power_status=False
)
await self.coordinator.async_request_refresh()
await self.coordinator.async_request_refresh()
@callback
def _update_attr(self, device: mill.Heater) -> None:
@@ -189,25 +192,26 @@ class LocalMillHeater(CoordinatorEntity[MillDataUpdateCoordinator], ClimateEntit
self._update_attr()
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
"""Set new target temperature and optionally HVAC mode."""
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
return
await self.coordinator.mill_data_connection.set_target_temperature(
float(temperature)
)
await self.coordinator.async_request_refresh()
if (hvac_mode := kwargs.get(ATTR_HVAC_MODE)) is not None:
await self.async_handle_set_hvac_mode_service(hvac_mode)
else:
await self.coordinator.async_request_refresh()
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
if hvac_mode == HVACMode.HEAT:
await self.coordinator.mill_data_connection.set_operation_mode_control_individually()
await self.coordinator.async_request_refresh()
elif hvac_mode == HVACMode.OFF:
await self.coordinator.mill_data_connection.set_operation_mode_off()
await self.coordinator.async_request_refresh()
elif hvac_mode == HVACMode.AUTO:
await self.coordinator.mill_data_connection.set_operation_mode_weekly_program()
await self.coordinator.async_request_refresh()
await self.coordinator.async_request_refresh()
@callback
def _handle_coordinator_update(self) -> None:
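A minimal sketch of the set-temperature flow introduced above; entity stands in for a MillHeater and the helper simplifies the service-handling call used in the diff, so it is illustrative rather than a drop-in replacement.

from homeassistant.components.climate import ATTR_HVAC_MODE
from homeassistant.const import ATTR_TEMPERATURE


async def set_temperature_then_mode(entity, **kwargs) -> None:
    """Sketch: apply the setpoint first, then an optional HVAC mode from the same call."""
    if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
        return
    await entity.coordinator.mill_data_connection.set_heater_temp(
        entity._id, float(temperature)
    )
    if (hvac_mode := kwargs.get(ATTR_HVAC_MODE)) is not None:
        # Changing the mode triggers its own refresh, so skip the extra one
        await entity.async_set_hvac_mode(hvac_mode)
    else:
        await entity.coordinator.async_request_refresh()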


@@ -6,6 +6,7 @@ import asyncio
from functools import partial
from http import HTTPStatus
import logging
from typing import Any
import aiohttp
@@ -47,7 +48,7 @@ from .util import supports_push
_LOGGER = logging.getLogger(__name__)
def push_registrations(hass):
def push_registrations(hass: HomeAssistant) -> dict[str, str]:
"""Return a dictionary of push enabled registrations."""
targets = {}
@@ -90,38 +91,32 @@ async def async_get_service(
discovery_info: DiscoveryInfoType | None = None,
) -> MobileAppNotificationService:
"""Get the mobile_app notification service."""
service = hass.data[DOMAIN][DATA_NOTIFY] = MobileAppNotificationService(hass)
service = hass.data[DOMAIN][DATA_NOTIFY] = MobileAppNotificationService()
return service
class MobileAppNotificationService(BaseNotificationService):
"""Implement the notification service for mobile_app."""
def __init__(self, hass):
"""Initialize the service."""
self._hass = hass
@property
def targets(self):
def targets(self) -> dict[str, str]:
"""Return a dictionary of registered targets."""
return push_registrations(self.hass)
async def async_send_message(self, message="", **kwargs):
async def async_send_message(self, message: str = "", **kwargs: Any) -> None:
"""Send a message to the Lambda APNS gateway."""
data = {ATTR_MESSAGE: message}
# Remove default title from notifications.
if (
kwargs.get(ATTR_TITLE) is not None
and kwargs.get(ATTR_TITLE) != ATTR_TITLE_DEFAULT
):
data[ATTR_TITLE] = kwargs.get(ATTR_TITLE)
title_arg := kwargs.get(ATTR_TITLE)
) is not None and title_arg != ATTR_TITLE_DEFAULT:
data[ATTR_TITLE] = title_arg
if not (targets := kwargs.get(ATTR_TARGET)):
targets = push_registrations(self.hass).values()
if kwargs.get(ATTR_DATA) is not None:
data[ATTR_DATA] = kwargs.get(ATTR_DATA)
if (data_arg := kwargs.get(ATTR_DATA)) is not None:
data[ATTR_DATA] = data_arg
local_push_channels = self.hass.data[DOMAIN][DATA_PUSH_CHANNEL]
@@ -166,7 +161,7 @@ class MobileAppNotificationService(BaseNotificationService):
try:
async with asyncio.timeout(10):
response = await async_get_clientsession(self._hass).post(
response = await async_get_clientsession(self.hass).post(
push_url, json=target_data
)
result = await response.json()
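The kwargs handling above relies on assignment expressions to include only values that were actually provided. A tiny standalone illustration of that pattern follows; the attribute constants are redefined locally for the sketch rather than imported from the notify component.

from typing import Any

ATTR_TITLE = "title"
ATTR_TITLE_DEFAULT = "Home Assistant"
ATTR_DATA = "data"


def build_payload(message: str, **kwargs: Any) -> dict[str, Any]:
    """Collect only the optional kwargs that were actually provided."""
    data: dict[str, Any] = {"message": message}
    if (title := kwargs.get(ATTR_TITLE)) is not None and title != ATTR_TITLE_DEFAULT:
        data[ATTR_TITLE] = title
    if (extra := kwargs.get(ATTR_DATA)) is not None:
        data[ATTR_DATA] = extra
    return data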


@@ -3,6 +3,7 @@
from __future__ import annotations
import logging
from typing import Any
from mycroftapi import MycroftAPI
@@ -10,6 +11,8 @@ from homeassistant.components.notify import BaseNotificationService
from homeassistant.core import HomeAssistant
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from . import DOMAIN
_LOGGER = logging.getLogger(__name__)
@@ -19,17 +22,17 @@ def get_service(
discovery_info: DiscoveryInfoType | None = None,
) -> MycroftNotificationService:
"""Get the Mycroft notification service."""
return MycroftNotificationService(hass.data["mycroft"])
return MycroftNotificationService(hass.data[DOMAIN])
class MycroftNotificationService(BaseNotificationService):
"""The Mycroft Notification Service."""
def __init__(self, mycroft_ip):
def __init__(self, mycroft_ip: str) -> None:
"""Initialize the service."""
self.mycroft_ip = mycroft_ip
def send_message(self, message="", **kwargs):
def send_message(self, message: str = "", **kwargs: Any) -> None:
"""Send a message mycroft to speak on instance."""
text = message
@@ -37,4 +40,4 @@ class MycroftNotificationService(BaseNotificationService):
if mycroft is not None:
mycroft.speak_text(text)
else:
_LOGGER.log("Could not reach this instance of mycroft")
_LOGGER.warning("Could not reach this instance of mycroft")


@@ -1,25 +1,20 @@
"""Support for namecheap DNS services."""
from datetime import timedelta
import logging
import defusedxml.ElementTree as ET
import voluptuous as vol
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_DOMAIN, CONF_HOST, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType
from .const import DOMAIN
from .coordinator import NamecheapConfigEntry, NamecheapDnsUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
DOMAIN = "namecheapdns"
INTERVAL = timedelta(minutes=5)
UPDATE_URL = "https://dynamicdns.park-your-domain.com/update"
CONFIG_SCHEMA = vol.Schema(
{
@@ -37,37 +32,30 @@ CONFIG_SCHEMA = vol.Schema(
async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Initialize the namecheap DNS component."""
host = config[DOMAIN][CONF_HOST]
domain = config[DOMAIN][CONF_DOMAIN]
password = config[DOMAIN][CONF_PASSWORD]
session = async_get_clientsession(hass)
result = await _update_namecheapdns(session, host, domain, password)
if not result:
return False
async def update_domain_interval(now):
"""Update the namecheap DNS entry."""
await _update_namecheapdns(session, host, domain, password)
async_track_time_interval(hass, update_domain_interval, INTERVAL)
return result
async def _update_namecheapdns(session, host, domain, password):
"""Update namecheap DNS entry."""
params = {"host": host, "domain": domain, "password": password}
resp = await session.get(UPDATE_URL, params=params)
xml_string = await resp.text()
root = ET.fromstring(xml_string)
err_count = root.find("ErrCount").text
if int(err_count) != 0:
_LOGGER.warning("Updating namecheap domain failed: %s", domain)
return False
if DOMAIN in config:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
)
)
return True
async def async_setup_entry(hass: HomeAssistant, entry: NamecheapConfigEntry) -> bool:
"""Set up Namecheap DynamicDNS from a config entry."""
coordinator = NamecheapDnsUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
# Add a dummy listener as we do not have regular entities
entry.async_on_unload(coordinator.async_add_listener(lambda: None))
return True
async def async_unload_entry(hass: HomeAssistant, entry: NamecheapConfigEntry) -> bool:
"""Unload a config entry."""
return True
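The no-op listener above is what keeps the 5-minute updates running: a DataUpdateCoordinator only schedules periodic refreshes while at least one listener is registered. A minimal sketch of the same idea, with NamecheapDnsUpdateCoordinator taken from the coordinator module below and the function name invented for illustration:

from homeassistant.core import HomeAssistant

from .coordinator import NamecheapConfigEntry, NamecheapDnsUpdateCoordinator


async def keep_coordinator_polling(hass: HomeAssistant, entry: NamecheapConfigEntry) -> None:
    """Sketch: keep a listener registered so the DNS refresh stays scheduled."""
    coordinator = NamecheapDnsUpdateCoordinator(hass, entry)
    await coordinator.async_config_entry_first_refresh()
    # No entities subscribe to this coordinator, so a no-op listener keeps it polling;
    # async_on_unload removes the listener when the entry is unloaded.
    entry.async_on_unload(coordinator.async_add_listener(lambda: None))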


@@ -0,0 +1,139 @@
"""Config flow for the Namecheap DynamicDNS integration."""
from __future__ import annotations
import logging
from typing import Any
from aiohttp import ClientError
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_DOMAIN, CONF_HOST, CONF_NAME, CONF_PASSWORD
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from .const import DOMAIN
from .helpers import update_namecheapdns
from .issue import deprecate_yaml_issue
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default="@"): cv.string,
vol.Required(CONF_DOMAIN): cv.string,
vol.Required(CONF_PASSWORD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD, autocomplete="current-password"
)
),
}
)
STEP_RECONFIGURE_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_PASSWORD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD, autocomplete="current-password"
)
),
}
)
class NamecheapDnsConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Namecheap DynamicDNS."""
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match(
{CONF_HOST: user_input[CONF_HOST], CONF_DOMAIN: user_input[CONF_DOMAIN]}
)
session = async_get_clientsession(self.hass)
try:
if not await update_namecheapdns(session, **user_input):
errors["base"] = "update_failed"
except ClientError:
_LOGGER.debug("Cannot connect", exc_info=True)
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
if not errors:
return self.async_create_entry(
title=f"{user_input[CONF_HOST]}.{user_input[CONF_DOMAIN]}",
data=user_input,
)
return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
data_schema=STEP_USER_DATA_SCHEMA, suggested_values=user_input
),
errors=errors,
description_placeholders={"account_panel": "https://ap.www.namecheap.com/"},
)
async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult:
"""Import config from yaml."""
self._async_abort_entries_match(
{CONF_HOST: import_info[CONF_HOST], CONF_DOMAIN: import_info[CONF_DOMAIN]}
)
result = await self.async_step_user(import_info)
if errors := result.get("errors"):
deprecate_yaml_issue(self.hass, import_success=False)
return self.async_abort(reason=errors["base"])
deprecate_yaml_issue(self.hass, import_success=True)
return result
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfigure flow."""
errors: dict[str, str] = {}
entry = self._get_reconfigure_entry()
if user_input is not None:
session = async_get_clientsession(self.hass)
try:
if not await update_namecheapdns(
session,
entry.data[CONF_HOST],
entry.data[CONF_DOMAIN],
user_input[CONF_PASSWORD],
):
errors["base"] = "update_failed"
except ClientError:
_LOGGER.debug("Cannot connect", exc_info=True)
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
if not errors:
return self.async_update_reload_and_abort(
entry,
data_updates=user_input,
)
return self.async_show_form(
step_id="reconfigure",
data_schema=STEP_RECONFIGURE_DATA_SCHEMA,
errors=errors,
description_placeholders={CONF_NAME: entry.title},
)


@@ -0,0 +1,6 @@
"""Constants for the Namecheap DynamicDNS integration."""
DOMAIN = "namecheapdns"
UPDATE_URL = "https://dynamicdns.park-your-domain.com/update"


@@ -0,0 +1,61 @@
"""Coordinator for the Namecheap DynamicDNS integration."""
from datetime import timedelta
import logging
from aiohttp import ClientError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_DOMAIN, CONF_HOST, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
from .helpers import update_namecheapdns
_LOGGER = logging.getLogger(__name__)
type NamecheapConfigEntry = ConfigEntry[NamecheapDnsUpdateCoordinator]
INTERVAL = timedelta(minutes=5)
class NamecheapDnsUpdateCoordinator(DataUpdateCoordinator[None]):
"""Namecheap DynamicDNS update coordinator."""
config_entry: NamecheapConfigEntry
def __init__(self, hass: HomeAssistant, config_entry: NamecheapConfigEntry) -> None:
"""Initialize the Namecheap DynamicDNS update coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=INTERVAL,
)
self.session = async_get_clientsession(hass)
async def _async_update_data(self) -> None:
"""Update Namecheap DNS."""
host = self.config_entry.data[CONF_HOST]
domain = self.config_entry.data[CONF_DOMAIN]
password = self.config_entry.data[CONF_PASSWORD]
try:
if not await update_namecheapdns(self.session, host, domain, password):
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_failed",
translation_placeholders={CONF_DOMAIN: f"{host}.{domain}"},
)
except ClientError as e:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="connection_error",
translation_placeholders={CONF_DOMAIN: f"{host}.{domain}"},
) from e


@@ -0,0 +1,24 @@
"""Helpers for the Namecheap DynamicDNS integration."""
import logging
from aiohttp import ClientSession
from .const import UPDATE_URL
_LOGGER = logging.getLogger(__name__)
async def update_namecheapdns(
session: ClientSession, host: str, domain: str, password: str
):
"""Update namecheap DNS entry."""
params = {"host": host, "domain": domain, "password": password}
resp = await session.get(UPDATE_URL, params=params)
xml_string = await resp.text()
if "<ErrCount>0</ErrCount>" not in xml_string:
return False
return True
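A hedged usage example of the helper above from a plain script, assuming a standalone aiohttp.ClientSession instead of Home Assistant's shared session; the host, domain and password values are placeholders.

import asyncio

import aiohttp


async def main() -> None:
    """Push one dynamic DNS update and report the outcome."""
    async with aiohttp.ClientSession() as session:
        ok = await update_namecheapdns(session, "@", "example.com", "dns-password")
        print("update succeeded" if ok else "update failed")


if __name__ == "__main__":
    asyncio.run(main())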


@@ -0,0 +1,40 @@
"""Issues for Namecheap DynamicDNS integration."""
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue
from .const import DOMAIN
@callback
def deprecate_yaml_issue(hass: HomeAssistant, *, import_success: bool) -> None:
"""Deprecate yaml issue."""
if import_success:
async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
is_fixable=False,
issue_domain=DOMAIN,
breaks_in_ha_version="2026.8.0",
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Namecheap DynamicDNS",
},
)
else:
async_create_issue(
hass,
DOMAIN,
"deprecated_yaml_import_issue_error",
breaks_in_ha_version="2026.8.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_issue_error",
translation_placeholders={
"url": f"/config/integrations/dashboard/add?domain={DOMAIN}"
},
)


@@ -1,9 +1,10 @@
{
"domain": "namecheapdns",
"name": "Namecheap DynamicDNS",
"codeowners": [],
"codeowners": ["@tr4nt0r"],
"config_flow": true,
"documentation": "https://www.home-assistant.io/integrations/namecheapdns",
"integration_type": "service",
"iot_class": "cloud_push",
"quality_scale": "legacy",
"requirements": ["defusedxml==0.7.1"]
"requirements": []
}


@@ -0,0 +1,51 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"unknown": "[%key:common::config_flow::error::unknown%]",
"update_failed": "Updating DNS failed"
},
"step": {
"reconfigure": {
"data": {
"password": "[%key:component::namecheapdns::config::step::user::data::password%]"
},
"data_description": {
"password": "[%key:component::namecheapdns::config::step::user::data_description::password%]"
},
"title": "Re-configure {name}"
},
"user": {
"data": {
"domain": "[%key:common::config_flow::data::username%]",
"host": "[%key:common::config_flow::data::host%]",
"password": "Dynamic DNS password"
},
"data_description": {
"domain": "The domain to update ('example.com')",
"host": "The host to update ('home' for home.example.com). Use '@' to update the root domain",
"password": "Dynamic DNS password for the domain"
},
"description": "Enter your Namecheap DynamicDNS domain and password below to configure dynamic DNS updates. You can find the Dynamic DNS password in your [Namecheap account]({account_panel}) under Domain List > Manage > Advanced DNS > Dynamic DNS."
}
}
},
"exceptions": {
"connection_error": {
"message": "Updating Namecheap DynamicDNS domain {domain} failed due to a connection error"
},
"update_failed": {
"message": "Updating Namecheap DynamicDNS domain {domain} failed"
}
},
"issues": {
"deprecated_yaml_import_issue_error": {
"description": "Configuring Namecheap DynamicDNS using YAML is being removed but there was an error when trying to import the YAML configuration.\n\nEnsure the YAML configuration is correct and restart Home Assistant to try again or remove the Namecheap DynamicDNS YAML configuration from your `configuration.yaml` file and continue to [set up the integration]({url}) manually.",
"title": "The Namecheap DynamicDNS YAML configuration import failed"
}
}
}


@@ -21,6 +21,7 @@ from .nasweb_data import NASwebData
PLATFORMS: list[Platform] = [
Platform.ALARM_CONTROL_PANEL,
Platform.CLIMATE,
Platform.SENSOR,
Platform.SWITCH,
]


@@ -0,0 +1,168 @@
"""Platform for NASweb thermostat."""
from __future__ import annotations
import time
from typing import Any
from webio_api import Thermostat as NASwebThermostat
from webio_api.const import KEY_THERMOSTAT
from homeassistant.components.climate import (
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
UnitOfTemperature,
)
from homeassistant.components.sensor import SensorDeviceClass
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import DiscoveryInfoType
from homeassistant.helpers.update_coordinator import (
BaseCoordinatorEntity,
BaseDataUpdateCoordinatorProtocol,
)
from . import NASwebConfigEntry
from .const import DOMAIN, STATUS_UPDATE_MAX_TIME_INTERVAL
CLIMATE_TRANSLATION_KEY = "thermostat"
async def async_setup_entry(
hass: HomeAssistant,
config: NASwebConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up Climate platform."""
coordinator = config.runtime_data
nasweb_thermostat: NASwebThermostat = coordinator.data[KEY_THERMOSTAT]
climate = Thermostat(coordinator, nasweb_thermostat)
async_add_entities([climate])
class Thermostat(ClimateEntity, BaseCoordinatorEntity):
"""Entity representing NASweb thermostat."""
_attr_device_class = SensorDeviceClass.TEMPERATURE
_attr_has_entity_name = True
_attr_hvac_modes = [
HVACMode.OFF,
HVACMode.HEAT,
HVACMode.COOL,
HVACMode.HEAT_COOL,
HVACMode.FAN_ONLY,
]
_attr_max_temp = 50
_attr_min_temp = -50
_attr_precision = 1.0
_attr_should_poll = False
_attr_supported_features = ClimateEntityFeature(
ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
)
_attr_target_temperature_step = 1.0
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_translation_key = CLIMATE_TRANSLATION_KEY
def __init__(
self,
coordinator: BaseDataUpdateCoordinatorProtocol,
nasweb_thermostat: NASwebThermostat,
) -> None:
"""Initialize Thermostat."""
super().__init__(coordinator)
self._thermostat = nasweb_thermostat
self._attr_available = False
self._attr_name = nasweb_thermostat.name
self._attr_unique_id = f"{DOMAIN}.{self._thermostat.webio_serial}.thermostat"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._thermostat.webio_serial)}
)
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()
self._handle_coordinator_update()
def _set_attr_available(
self, entity_last_update: float, available: bool | None
) -> None:
if (
self.coordinator.last_update is None
or time.time() - entity_last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL
):
self._attr_available = False
else:
self._attr_available = available if available is not None else False
@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._attr_current_temperature = self._thermostat.current_temp
self._attr_target_temperature_low = self._thermostat.temp_target_min
self._attr_target_temperature_high = self._thermostat.temp_target_max
self._attr_hvac_mode = self._get_current_hvac_mode()
self._attr_hvac_action = self._get_current_action()
self._attr_name = self._thermostat.name if self._thermostat.name else None
self._set_attr_available(
self._thermostat.last_update, self._thermostat.available
)
self.async_write_ha_state()
def _get_current_hvac_mode(self) -> HVACMode:
have_cooling = self._thermostat.enabled_above_output
have_heating = self._thermostat.enabled_below_output
if have_cooling and have_heating:
return HVACMode.HEAT_COOL
if have_cooling:
return HVACMode.COOL
if have_heating:
return HVACMode.HEAT
if self._thermostat.enabled_inrange_output:
return HVACMode.FAN_ONLY
return HVACMode.OFF
def _get_current_action(self) -> HVACAction:
if self._thermostat.current_temp is None:
return HVACAction.OFF
if (
self._thermostat.temp_target_min is not None
and self._thermostat.current_temp < self._thermostat.temp_target_min
and self._thermostat.enabled_below_output
):
return HVACAction.HEATING
if (
self._thermostat.temp_target_max is not None
and self._thermostat.current_temp > self._thermostat.temp_target_max
and self._thermostat.enabled_above_output
):
return HVACAction.COOLING
if (
self._thermostat.temp_target_min is not None
and self._thermostat.temp_target_max is not None
and self._thermostat.current_temp >= self._thermostat.temp_target_min
and self._thermostat.current_temp <= self._thermostat.temp_target_max
and self._thermostat.enabled_inrange_output
):
return HVACAction.FAN
return HVACAction.IDLE
async def async_update(self) -> None:
"""Update the entity.
Only used by the generic entity update service.
Scheduling updates is not necessary; the coordinator takes care of updates via push notifications.
"""
async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set HVACMode for Thermostat."""
await self._thermostat.set_hvac_mode(hvac_mode)
async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set temperature range for Thermostat."""
await self._thermostat.set_temperature(
kwargs["target_temp_low"], kwargs["target_temp_high"]
)
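
Because the entity only declares ClimateEntityFeature.TARGET_TEMPERATURE_RANGE, callers set a low/high pair rather than a single temperature. A minimal usage sketch from an async context (the entity_id is a placeholder) could be:

# Sketch only: exercising the TARGET_TEMPERATURE_RANGE support shown above via
# the standard climate.set_temperature service; values and entity_id are illustrative.
await hass.services.async_call(
    "climate",
    "set_temperature",
    {
        "entity_id": "climate.nasweb_thermostat",
        "target_temp_low": 19,
        "target_temp_high": 23,
    },
    blocking=True,
)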

View File

@@ -23,6 +23,7 @@ _LOGGER = logging.getLogger(__name__)
KEY_INPUTS = "inputs"
KEY_OUTPUTS = "outputs"
KEY_THERMOSTAT = "thermostat"
KEY_ZONES = "zones"
@@ -104,6 +105,7 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
KEY_OUTPUTS: self.webio_api.outputs,
KEY_INPUTS: self.webio_api.inputs,
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
KEY_THERMOSTAT: self.webio_api.thermostat,
KEY_ZONES: self.webio_api.zones,
}
self.async_set_updated_data(data)
@@ -199,6 +201,7 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
KEY_OUTPUTS: self.webio_api.outputs,
KEY_INPUTS: self.webio_api.inputs,
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
KEY_THERMOSTAT: self.webio_api.thermostat,
KEY_ZONES: self.webio_api.zones,
}
self.async_set_updated_data(new_data)

View File

@@ -29,6 +29,11 @@
"name": "Zone {index}"
}
},
"climate": {
"thermostat": {
"name": "[%key:component::climate::entity_component::_::name%]"
}
},
"sensor": {
"sensor_input": {
"name": "Input {index}",

View File

@@ -6,6 +6,5 @@
"documentation": "https://www.home-assistant.io/integrations/nederlandse_spoorwegen",
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "legacy",
"requirements": ["nsapi==3.1.3"]
}

View File

@@ -7,6 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["pyrail"],
"quality_scale": "legacy",
"requirements": ["pyrail==0.4.1"]
}

View File

@@ -4,27 +4,35 @@ from __future__ import annotations
from openevsehttp.__main__ import OpenEVSE
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, Platform
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryError
from homeassistant.exceptions import ConfigEntryNotReady
type OpenEVSEConfigEntry = ConfigEntry[OpenEVSE]
from .coordinator import OpenEVSEConfigEntry, OpenEVSEDataUpdateCoordinator
async def async_setup_entry(hass: HomeAssistant, entry: OpenEVSEConfigEntry) -> bool:
"""Set up openevse from a config entry."""
"""Set up OpenEVSE from a config entry."""
charger = OpenEVSE(
entry.data[CONF_HOST],
entry.data.get(CONF_USERNAME),
entry.data.get(CONF_PASSWORD),
)
entry.runtime_data = OpenEVSE(entry.data[CONF_HOST])
try:
await entry.runtime_data.test_and_get()
await charger.test_and_get()
except TimeoutError as ex:
raise ConfigEntryError("Unable to connect to charger") from ex
raise ConfigEntryNotReady("Unable to connect to charger") from ex
coordinator = OpenEVSEDataUpdateCoordinator(hass, entry, charger)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator
await hass.config_entries.async_forward_entry_setups(entry, [Platform.SENSOR])
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: OpenEVSEConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(entry, [Platform.SENSOR])

View File

@@ -3,14 +3,22 @@
from typing import Any
from openevsehttp.__main__ import OpenEVSE
from openevsehttp.exceptions import AuthenticationError, MissingSerial
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_NAME
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.service_info import zeroconf
from .const import CONF_ID, CONF_SERIAL, DOMAIN
USER_SCHEMA = vol.Schema({vol.Required(CONF_HOST): cv.string})
AUTH_SCHEMA = vol.Schema(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
class OpenEVSEConfigFlow(ConfigFlow, domain=DOMAIN):
"""OpenEVSE config flow."""
@@ -21,39 +29,49 @@ class OpenEVSEConfigFlow(ConfigFlow, domain=DOMAIN):
def __init__(self) -> None:
"""Set up the instance."""
self.discovery_info: dict[str, Any] = {}
self._host: str | None = None
async def check_status(self, host: str) -> tuple[bool, str | None]:
async def check_status(
self, host: str, user: str | None = None, password: str | None = None
) -> tuple[dict[str, str], str | None]:
"""Check if we can connect to the OpenEVSE charger."""
charger = OpenEVSE(host)
charger = OpenEVSE(host, user, password)
try:
result = await charger.test_and_get()
except TimeoutError:
return False, None
return True, result.get(CONF_SERIAL)
return {"base": "cannot_connect"}, None
except AuthenticationError:
return {"base": "invalid_auth"}, None
except MissingSerial:
return {}, None
return {}, result.get(CONF_SERIAL)
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors = {}
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})
errors, serial = await self.check_status(user_input[CONF_HOST])
if (result := await self.check_status(user_input[CONF_HOST]))[0]:
if (serial := result[1]) is not None:
if not errors:
if serial is not None:
await self.async_set_unique_id(serial, raise_on_progress=False)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"OpenEVSE {user_input[CONF_HOST]}",
data=user_input,
)
errors = {CONF_HOST: "cannot_connect"}
if errors["base"] == "invalid_auth":
self._host = user_input[CONF_HOST]
return await self.async_step_auth()
return self.async_show_form(
step_id="user",
data_schema=vol.Schema({vol.Required(CONF_HOST): str}),
data_schema=self.add_suggested_values_to_schema(USER_SCHEMA, user_input),
errors=errors,
)
@@ -61,9 +79,10 @@ class OpenEVSEConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle the initial step."""
self._async_abort_entries_match({CONF_HOST: data[CONF_HOST]})
errors, serial = await self.check_status(data[CONF_HOST])
if (result := await self.check_status(data[CONF_HOST]))[0]:
if (serial := result[1]) is not None:
if not errors:
if serial is not None:
await self.async_set_unique_id(serial)
self._abort_if_unique_id_configured()
else:
@@ -92,17 +111,20 @@ class OpenEVSEConfigFlow(ConfigFlow, domain=DOMAIN):
}
)
self.context.update({"title_placeholders": {"name": name}})
if not (await self.check_status(host))[0]:
return self.async_abort(reason="cannot_connect")
return await self.async_step_discovery_confirm()
async def async_step_discovery_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Confirm discovery."""
errors, _ = await self.check_status(self.discovery_info[CONF_HOST])
if errors:
if errors["base"] == "invalid_auth":
return await self.async_step_auth()
return self.async_abort(reason="unavailable_host")
if user_input is None:
self._set_confirm_only()
return self.async_show_form(
step_id="discovery_confirm",
description_placeholders={"name": self.discovery_info[CONF_NAME]},
@@ -112,3 +134,36 @@ class OpenEVSEConfigFlow(ConfigFlow, domain=DOMAIN):
title=self.discovery_info[CONF_NAME],
data={CONF_HOST: self.discovery_info[CONF_HOST]},
)
async def async_step_auth(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the authentication step."""
errors: dict[str, str] = {}
if user_input is not None:
host = self._host or self.discovery_info[CONF_HOST]
errors, serial = await self.check_status(
host,
user_input[CONF_USERNAME],
user_input[CONF_PASSWORD],
)
if not errors:
if self.unique_id is None and serial is not None:
await self.async_set_unique_id(serial)
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=f"OpenEVSE {host}",
data={
CONF_HOST: host,
CONF_USERNAME: user_input[CONF_USERNAME],
CONF_PASSWORD: user_input[CONF_PASSWORD],
},
)
return self.async_show_form(
step_id="auth",
data_schema=self.add_suggested_values_to_schema(AUTH_SCHEMA, user_input),
errors=errors,
)
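
The reworked check_status now returns an errors dict plus an optional serial, and every step dispatches on that pair. A compact, self-contained restatement of that dispatch (the step names mirror the hunk above; "next_step" itself is purely illustrative):

# Sketch only: how the flow interprets the (errors, serial) pair from check_status.
def next_step(errors: dict[str, str], serial: str | None) -> str:
    """Return which flow outcome the (errors, serial) pair leads to."""
    if errors.get("base") == "invalid_auth":
        return "auth"  # charger wants credentials -> async_step_auth
    if errors:
        return "show_errors"  # e.g. {"base": "cannot_connect"} after a TimeoutError
    if serial is None:
        return "create_entry"  # MissingSerial: connected, but no unique ID available
    return "create_entry_with_unique_id"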

View File

@@ -0,0 +1,51 @@
"""Data update coordinator for OpenEVSE."""
from __future__ import annotations
from datetime import timedelta
import logging
from openevsehttp.__main__ import OpenEVSE
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=30)
type OpenEVSEConfigEntry = ConfigEntry[OpenEVSEDataUpdateCoordinator]
class OpenEVSEDataUpdateCoordinator(DataUpdateCoordinator[None]):
"""Class to manage fetching OpenEVSE data."""
config_entry: OpenEVSEConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: OpenEVSEConfigEntry,
charger: OpenEVSE,
) -> None:
"""Initialize coordinator."""
self.charger = charger
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=SCAN_INTERVAL,
)
async def _async_update_data(self) -> None:
"""Fetch data from OpenEVSE charger."""
try:
await self.charger.update()
except TimeoutError as error:
raise UpdateFailed(
f"Timeout communicating with charger: {error}"
) from error

View File

@@ -2,6 +2,8 @@
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
import logging
from openevsehttp.__main__ import OpenEVSE
@@ -33,61 +35,82 @@ from homeassistant.helpers.entity_platform import (
AddConfigEntryEntitiesCallback,
AddEntitiesCallback,
)
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType, StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import ConfigEntry
from .const import DOMAIN, INTEGRATION_TITLE
from .coordinator import OpenEVSEConfigEntry, OpenEVSEDataUpdateCoordinator
_LOGGER = logging.getLogger(__name__)
SENSOR_TYPES: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
PARALLEL_UPDATES = 0
@dataclass(frozen=True, kw_only=True)
class OpenEVSESensorDescription(SensorEntityDescription):
"""Describes an OpenEVSE sensor entity."""
value_fn: Callable[[OpenEVSE], str | float | None]
SENSOR_TYPES: tuple[OpenEVSESensorDescription, ...] = (
OpenEVSESensorDescription(
key="status",
translation_key="status",
value_fn=lambda ev: ev.status,
),
SensorEntityDescription(
OpenEVSESensorDescription(
key="charge_time",
translation_key="charge_time",
native_unit_of_measurement=UnitOfTime.MINUTES,
native_unit_of_measurement=UnitOfTime.SECONDS,
suggested_unit_of_measurement=UnitOfTime.MINUTES,
device_class=SensorDeviceClass.DURATION,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda ev: ev.charge_time_elapsed,
),
SensorEntityDescription(
OpenEVSESensorDescription(
key="ambient_temp",
translation_key="ambient_temp",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda ev: ev.ambient_temperature,
),
SensorEntityDescription(
OpenEVSESensorDescription(
key="ir_temp",
translation_key="ir_temp",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda ev: ev.ir_temperature,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
OpenEVSESensorDescription(
key="rtc_temp",
translation_key="rtc_temp",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_class=SensorDeviceClass.TEMPERATURE,
state_class=SensorStateClass.MEASUREMENT,
value_fn=lambda ev: ev.rtc_temperature,
entity_registry_enabled_default=False,
),
SensorEntityDescription(
OpenEVSESensorDescription(
key="usage_session",
translation_key="usage_session",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
value_fn=lambda ev: ev.usage_session,
),
SensorEntityDescription(
OpenEVSESensorDescription(
key="usage_total",
translation_key="usage_total",
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
device_class=SensorDeviceClass.ENERGY,
state_class=SensorStateClass.TOTAL_INCREASING,
value_fn=lambda ev: ev.usage_total,
),
)
@@ -154,41 +177,34 @@ async def async_setup_platform(
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
entry: OpenEVSEConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Add sensors for passed config_entry in HA."""
"""Set up OpenEVSE sensors based on config entry."""
coordinator = entry.runtime_data
identifier = entry.unique_id or entry.entry_id
async_add_entities(
(
OpenEVSESensor(
config_entry.runtime_data,
description,
config_entry.entry_id,
config_entry.unique_id,
)
for description in SENSOR_TYPES
),
True,
OpenEVSESensor(coordinator, description, identifier, entry.unique_id)
for description in SENSOR_TYPES
)
class OpenEVSESensor(SensorEntity):
class OpenEVSESensor(CoordinatorEntity[OpenEVSEDataUpdateCoordinator], SensorEntity):
"""Implementation of an OpenEVSE sensor."""
_attr_has_entity_name = True
entity_description: OpenEVSESensorDescription
def __init__(
self,
charger: OpenEVSE,
description: SensorEntityDescription,
entry_id: str,
coordinator: OpenEVSEDataUpdateCoordinator,
description: OpenEVSESensorDescription,
identifier: str,
unique_id: str | None,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self.entity_description = description
self.charger = charger
identifier = unique_id or entry_id
self._attr_unique_id = f"{identifier}-{description.key}"
self._attr_device_info = DeviceInfo(
@@ -201,28 +217,7 @@ class OpenEVSESensor(SensorEntity):
}
self._attr_device_info[ATTR_SERIAL_NUMBER] = unique_id
async def async_update(self) -> None:
"""Get the monitored data from the charger."""
try:
await self.charger.update()
except TimeoutError:
_LOGGER.warning("Could not update status for %s", self.name)
return
sensor_type = self.entity_description.key
if sensor_type == "status":
self._attr_native_value = self.charger.status
elif sensor_type == "charge_time":
self._attr_native_value = self.charger.charge_time_elapsed / 60
elif sensor_type == "ambient_temp":
self._attr_native_value = self.charger.ambient_temperature
elif sensor_type == "ir_temp":
self._attr_native_value = self.charger.ir_temperature
elif sensor_type == "rtc_temp":
self._attr_native_value = self.charger.rtc_temperature
elif sensor_type == "usage_session":
self._attr_native_value = float(self.charger.usage_session) / 1000
elif sensor_type == "usage_total":
self._attr_native_value = float(self.charger.usage_total) / 1000
else:
self._attr_native_value = "Unknown"
@property
def native_value(self) -> StateType:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.coordinator.charger)
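
With the frozen description dataclass above, adding another reading is just a matter of appending a description with its own value_fn. A hypothetical extra entry for illustration only — the key and translation_key are invented, and it reuses the ambient_temperature attribute already referenced above:

# Hypothetical description, not part of the change above.
OpenEVSESensorDescription(
    key="ambient_temp_rounded",
    translation_key="ambient_temp_rounded",
    native_unit_of_measurement=UnitOfTemperature.CELSIUS,
    device_class=SensorDeviceClass.TEMPERATURE,
    state_class=SensorStateClass.MEASUREMENT,
    entity_registry_enabled_default=False,
    value_fn=lambda ev: (
        round(ev.ambient_temperature, 1)
        if ev.ambient_temperature is not None
        else None
    ),
)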

View File

@@ -5,9 +5,20 @@
"unavailable_host": "Unable to connect to host"
},
"error": {
"cannot_connect": "Unable to connect"
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"invalid_auth": "[%key:common::config_flow::error::invalid_auth%]"
},
"step": {
"auth": {
"data": {
"password": "[%key:common::config_flow::data::password%]",
"username": "[%key:common::config_flow::data::username%]"
},
"data_description": {
"password": "The password to access your OpenEVSE charger",
"username": "The username to access your OpenEVSE charger"
}
},
"user": {
"data": {
"host": "[%key:common::config_flow::data::host%]"

View File

@@ -9,5 +9,5 @@
"iot_class": "cloud_polling",
"loggers": ["opower"],
"quality_scale": "bronze",
"requirements": ["opower==0.16.1"]
"requirements": ["opower==0.16.2"]
}

View File

@@ -2,7 +2,8 @@
from __future__ import annotations
from typing import Literal
from collections.abc import Callable, Coroutine
from typing import Any, Literal
from pooldose.type_definitions import DeviceInfoDict, ValueDict
@@ -80,7 +81,10 @@ class PooldoseEntity(CoordinatorEntity[PooldoseCoordinator]):
return platform_data.get(self.entity_description.key)
async def _async_perform_write(
self, api_call, key: str, value: bool | str | float
self,
api_call: Callable[[str, Any], Coroutine[Any, Any, bool]],
key: str,
value: bool | str | float,
) -> None:
"""Perform a write call to the API with unified error handling.

View File

@@ -11,6 +11,6 @@
"documentation": "https://www.home-assistant.io/integrations/pooldose",
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "gold",
"requirements": ["python-pooldose==0.8.1"]
"quality_scale": "platinum",
"requirements": ["python-pooldose==0.8.2"]
}

View File

@@ -71,4 +71,4 @@ rules:
# Platinum
async-dependency: done
inject-websession: done
strict-typing: todo
strict-typing: done

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["pyportainer==1.0.22"]
"requirements": ["pyportainer==1.0.23"]
}

View File

@@ -7,6 +7,5 @@
"integration_type": "service",
"iot_class": "cloud_push",
"loggers": ["prowl"],
"quality_scale": "legacy",
"requirements": ["prowlpy==1.1.1"]
}

View File

@@ -6,6 +6,7 @@ import base64
from http import HTTPStatus
import logging
import mimetypes
from typing import Any
import requests
from requests.auth import HTTPBasicAuth
@@ -65,26 +66,23 @@ def get_service(
discovery_info: DiscoveryInfoType | None = None,
) -> PushsaferNotificationService:
"""Get the Pushsafer.com notification service."""
return PushsaferNotificationService(
config.get(CONF_DEVICE_KEY), hass.config.is_allowed_path
)
return PushsaferNotificationService(config[CONF_DEVICE_KEY])
class PushsaferNotificationService(BaseNotificationService):
"""Implementation of the notification service for Pushsafer.com."""
def __init__(self, private_key, is_allowed_path):
def __init__(self, private_key: str) -> None:
"""Initialize the service."""
self._private_key = private_key
self.is_allowed_path = is_allowed_path
def send_message(self, message="", **kwargs):
def send_message(self, message: str = "", **kwargs: Any) -> None:
"""Send a message to specified target."""
if kwargs.get(ATTR_TARGET) is None:
targets: list[str] | None
if (targets := kwargs.get(ATTR_TARGET)) is None:
targets = ["a"]
_LOGGER.debug("No target specified. Sending push to all")
else:
targets = kwargs.get(ATTR_TARGET)
_LOGGER.debug("%s target(s) specified", len(targets))
title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
@@ -170,7 +168,7 @@ class PushsaferNotificationService(BaseNotificationService):
try:
if local_path is not None:
_LOGGER.debug("Loading image from local path")
if self.is_allowed_path(local_path):
if self.hass.config.is_allowed_path(local_path):
file_mimetype = mimetypes.guess_type(local_path)
_LOGGER.debug("Detected mimetype %s", file_mimetype)
with open(local_path, "rb") as binary_file:

View File

@@ -21,5 +21,5 @@
"documentation": "https://www.home-assistant.io/integrations/qingping",
"integration_type": "device",
"iot_class": "local_push",
"requirements": ["qingping-ble==1.0.1"]
"requirements": ["qingping-ble==1.1.0"]
}

View File

@@ -3,6 +3,7 @@
from __future__ import annotations
import logging
from typing import Any
import voluptuous as vol
@@ -77,7 +78,7 @@ class TelegramNotificationService(BaseNotificationService):
self._chat_id = chat_id
self.hass = hass
def send_message(self, message="", **kwargs):
def send_message(self, message: str = "", **kwargs: Any) -> None:
"""Send a message to a user."""
service_data = {ATTR_TARGET: kwargs.get(ATTR_TARGET, self._chat_id)}
data = kwargs.get(ATTR_DATA)
@@ -126,7 +127,7 @@ class TelegramNotificationService(BaseNotificationService):
self.hass.services.call(
TELEGRAM_BOT_DOMAIN, "send_photo", service_data=service_data
)
return None
return
if data is not None and ATTR_VIDEO in data:
videos = data.get(ATTR_VIDEO)
videos = videos if isinstance(videos, list) else [videos]
@@ -135,7 +136,7 @@ class TelegramNotificationService(BaseNotificationService):
self.hass.services.call(
TELEGRAM_BOT_DOMAIN, "send_video", service_data=service_data
)
return None
return
if data is not None and ATTR_VOICE in data:
voices = data.get(ATTR_VOICE)
voices = voices if isinstance(voices, list) else [voices]
@@ -144,17 +145,19 @@ class TelegramNotificationService(BaseNotificationService):
self.hass.services.call(
TELEGRAM_BOT_DOMAIN, "send_voice", service_data=service_data
)
return None
return
if data is not None and ATTR_LOCATION in data:
service_data.update(data.get(ATTR_LOCATION))
return self.hass.services.call(
self.hass.services.call(
TELEGRAM_BOT_DOMAIN, "send_location", service_data=service_data
)
return
if data is not None and ATTR_DOCUMENT in data:
service_data.update(data.get(ATTR_DOCUMENT))
return self.hass.services.call(
self.hass.services.call(
TELEGRAM_BOT_DOMAIN, "send_document", service_data=service_data
)
return
# Send message
@@ -168,6 +171,6 @@ class TelegramNotificationService(BaseNotificationService):
TELEGRAM_BOT_DOMAIN,
service_data,
)
return self.hass.services.call(
self.hass.services.call(
TELEGRAM_BOT_DOMAIN, "send_message", service_data=service_data
)

View File

@@ -4,7 +4,7 @@ from typing import Final
from aiohttp.client_exceptions import ClientResponseError
import jwt
from tesla_fleet_api import TeslaFleetApi
from tesla_fleet_api import TeslaFleetApi, is_valid_region
from tesla_fleet_api.const import Scope
from tesla_fleet_api.exceptions import (
InvalidRegion,
@@ -14,6 +14,7 @@ from tesla_fleet_api.exceptions import (
OAuthExpired,
TeslaFleetError,
)
from tesla_fleet_api.tesla import VehicleFleet
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN, Platform
@@ -79,7 +80,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) -
token = jwt.decode(access_token, options={"verify_signature": False})
scopes: list[Scope] = [Scope(s) for s in token["scp"]]
region: str = token["ou_code"].lower()
region_code = token["ou_code"].lower()
region = region_code if is_valid_region(region_code) else None
oauth_session = OAuth2Session(hass, entry, implementation)
@@ -131,14 +133,15 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) -
product.pop("cached_data", None)
vin = product["vin"]
signing = product["command_signing"] == "required"
api_vehicle: VehicleFleet
if signing:
if not tesla.private_key:
await tesla.get_private_key(hass.config.path("tesla_fleet.key"))
api = tesla.vehicles.createSigned(vin)
api_vehicle = tesla.vehicles.createSigned(vin)
else:
api = tesla.vehicles.createFleet(vin)
api_vehicle = tesla.vehicles.createFleet(vin)
coordinator = TeslaFleetVehicleDataCoordinator(
hass, entry, api, product, Scope.VEHICLE_LOCATION in scopes
hass, entry, api_vehicle, product, Scope.VEHICLE_LOCATION in scopes
)
await coordinator.async_config_entry_first_refresh()
@@ -153,7 +156,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) -
vehicles.append(
TeslaFleetVehicleData(
api=api,
api=api_vehicle,
coordinator=coordinator,
vin=vin,
device=device,
@@ -173,14 +176,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) -
)
continue
api = tesla.energySites.create(site_id)
api_energy = tesla.energySites.create(site_id)
live_coordinator = TeslaFleetEnergySiteLiveCoordinator(hass, entry, api)
live_coordinator = TeslaFleetEnergySiteLiveCoordinator(
hass, entry, api_energy
)
history_coordinator = TeslaFleetEnergySiteHistoryCoordinator(
hass, entry, api
hass, entry, api_energy
)
info_coordinator = TeslaFleetEnergySiteInfoCoordinator(
hass, entry, api, product
hass, entry, api_energy, product
)
await live_coordinator.async_config_entry_first_refresh()
@@ -214,7 +219,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) -
energysites.append(
TeslaFleetEnergyData(
api=api,
api=api_energy,
live_coordinator=live_coordinator,
history_coordinator=history_coordinator,
info_coordinator=info_coordinator,

View File

@@ -79,7 +79,7 @@ class TeslaFleetClimateEntity(TeslaFleetVehicleEntity, ClimateEntity):
self,
data: TeslaFleetVehicleData,
side: TeslaFleetClimateSide,
scopes: Scope,
scopes: list[Scope],
) -> None:
"""Initialize the climate."""
@@ -219,7 +219,7 @@ class TeslaFleetCabinOverheatProtectionEntity(TeslaFleetVehicleEntity, ClimateEn
def __init__(
self,
data: TeslaFleetVehicleData,
scopes: Scope,
scopes: list[Scope],
) -> None:
"""Initialize the cabin overheat climate entity."""

View File

@@ -178,13 +178,15 @@ class TeslaFleetEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]])
try:
data = (await self.api.live_status())["response"]
except RateLimited as e:
LOGGER.warning(
"%s rate limited, will retry in %s seconds",
self.name,
e.data.get("after"),
)
if "after" in e.data:
if isinstance(e.data, dict) and "after" in e.data:
LOGGER.warning(
"%s rate limited, will retry in %s seconds",
self.name,
e.data["after"],
)
self.update_interval = timedelta(seconds=int(e.data["after"]))
else:
LOGGER.warning("%s rate limited, will skip refresh", self.name)
return self.data
except (InvalidToken, OAuthExpired, LoginRequired) as e:
raise ConfigEntryAuthFailed from e
@@ -240,13 +242,15 @@ class TeslaFleetEnergySiteHistoryCoordinator(DataUpdateCoordinator[dict[str, Any
try:
data = (await self.api.energy_history(TeslaEnergyPeriod.DAY))["response"]
except RateLimited as e:
LOGGER.warning(
"%s rate limited, will retry in %s seconds",
self.name,
e.data.get("after"),
)
if "after" in e.data:
if isinstance(e.data, dict) and "after" in e.data:
LOGGER.warning(
"%s rate limited, will retry in %s seconds",
self.name,
e.data["after"],
)
self.update_interval = timedelta(seconds=int(e.data["after"]))
else:
LOGGER.warning("%s rate limited, will skip refresh", self.name)
return self.data
except (InvalidToken, OAuthExpired, LoginRequired) as e:
raise ConfigEntryAuthFailed from e
@@ -303,13 +307,15 @@ class TeslaFleetEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]])
try:
data = (await self.api.site_info())["response"]
except RateLimited as e:
LOGGER.warning(
"%s rate limited, will retry in %s seconds",
self.name,
e.data.get("after"),
)
if "after" in e.data:
if isinstance(e.data, dict) and "after" in e.data:
LOGGER.warning(
"%s rate limited, will retry in %s seconds",
self.name,
e.data["after"],
)
self.update_interval = timedelta(seconds=int(e.data["after"]))
else:
LOGGER.warning("%s rate limited, will skip refresh", self.name)
return self.data
except (InvalidToken, OAuthExpired, LoginRequired) as e:
raise ConfigEntryAuthFailed from e

View File

@@ -1,7 +1,7 @@
"""Tesla Fleet parent entity class."""
from abc import abstractmethod
from typing import Any
from typing import Any, Generic, TypeVar
from tesla_fleet_api.const import Scope
from tesla_fleet_api.tesla.energysite import EnergySite
@@ -21,6 +21,8 @@ from .coordinator import (
from .helpers import wake_up_vehicle
from .models import TeslaFleetEnergyData, TeslaFleetVehicleData
_ApiT = TypeVar("_ApiT", bound=VehicleFleet | EnergySite)
class TeslaFleetEntity(
CoordinatorEntity[
@@ -28,13 +30,15 @@ class TeslaFleetEntity(
| TeslaFleetEnergySiteLiveCoordinator
| TeslaFleetEnergySiteHistoryCoordinator
| TeslaFleetEnergySiteInfoCoordinator
]
],
Generic[_ApiT],
):
"""Parent class for all TeslaFleet entities."""
_attr_has_entity_name = True
read_only: bool
scoped: bool
api: _ApiT
def __init__(
self,
@@ -42,7 +46,7 @@ class TeslaFleetEntity(
| TeslaFleetEnergySiteLiveCoordinator
| TeslaFleetEnergySiteHistoryCoordinator
| TeslaFleetEnergySiteInfoCoordinator,
api: VehicleFleet | EnergySite,
api: _ApiT,
key: str,
) -> None:
"""Initialize common aspects of a TeslaFleet entity."""
@@ -100,7 +104,7 @@ class TeslaFleetEntity(
)
class TeslaFleetVehicleEntity(TeslaFleetEntity):
class TeslaFleetVehicleEntity(TeslaFleetEntity[VehicleFleet]):
"""Parent class for TeslaFleet Vehicle entities."""
_last_update: int = 0
@@ -128,7 +132,7 @@ class TeslaFleetVehicleEntity(TeslaFleetEntity):
await wake_up_vehicle(self.vehicle)
class TeslaFleetEnergyLiveEntity(TeslaFleetEntity):
class TeslaFleetEnergyLiveEntity(TeslaFleetEntity[EnergySite]):
"""Parent class for TeslaFleet Energy Site Live entities."""
def __init__(
@@ -143,7 +147,7 @@ class TeslaFleetEnergyLiveEntity(TeslaFleetEntity):
super().__init__(data.live_coordinator, data.api, key)
class TeslaFleetEnergyHistoryEntity(TeslaFleetEntity):
class TeslaFleetEnergyHistoryEntity(TeslaFleetEntity[EnergySite]):
"""Parent class for TeslaFleet Energy Site History entities."""
def __init__(
@@ -158,7 +162,7 @@ class TeslaFleetEnergyHistoryEntity(TeslaFleetEntity):
super().__init__(data.history_coordinator, data.api, key)
class TeslaFleetEnergyInfoEntity(TeslaFleetEntity):
class TeslaFleetEnergyInfoEntity(TeslaFleetEntity[EnergySite]):
"""Parent class for TeslaFleet Energy Site Info entities."""
def __init__(
@@ -174,7 +178,7 @@ class TeslaFleetEnergyInfoEntity(TeslaFleetEntity):
class TeslaFleetWallConnectorEntity(
TeslaFleetEntity, CoordinatorEntity[TeslaFleetEnergySiteLiveCoordinator]
TeslaFleetEntity[EnergySite], CoordinatorEntity[TeslaFleetEnergySiteLiveCoordinator]
):
"""Parent class for Tesla Fleet Wall Connector entities."""

View File

@@ -8,5 +8,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
"requirements": ["tesla-fleet-api==1.3.2"]
"requirements": ["tesla-fleet-api==1.4.2"]
}

View File

@@ -33,7 +33,7 @@ PARALLEL_UPDATES = 0
class TeslaFleetNumberVehicleEntityDescription(NumberEntityDescription):
"""Describes TeslaFleet Number entity."""
func: Callable[[VehicleFleet, float], Awaitable[Any]]
func: Callable[[VehicleFleet, int], Awaitable[Any]]
native_min_value: float
native_max_value: float
min_key: str | None = None
@@ -74,19 +74,19 @@ VEHICLE_DESCRIPTIONS: tuple[TeslaFleetNumberVehicleEntityDescription, ...] = (
class TeslaFleetNumberBatteryEntityDescription(NumberEntityDescription):
"""Describes TeslaFleet Number entity."""
func: Callable[[EnergySite, float], Awaitable[Any]]
func: Callable[[EnergySite, int], Awaitable[Any]]
requires: str | None = None
ENERGY_INFO_DESCRIPTIONS: tuple[TeslaFleetNumberBatteryEntityDescription, ...] = (
TeslaFleetNumberBatteryEntityDescription(
key="backup_reserve_percent",
func=lambda api, value: api.backup(int(value)),
func=lambda api, value: api.backup(value),
requires="components_battery",
),
TeslaFleetNumberBatteryEntityDescription(
key="off_grid_vehicle_charging_reserve_percent",
func=lambda api, value: api.off_grid_vehicle_charging_reserve(int(value)),
func=lambda api, value: api.off_grid_vehicle_charging_reserve(value),
requires="components_off_grid_vehicle_charging_reserve_supported",
),
)

View File

@@ -136,14 +136,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -
# Remove the protobuff 'cached_data' that we do not use to save memory
product.pop("cached_data", None)
vin = product["vin"]
api = teslemetry.vehicles.create(vin)
coordinator = TeslemetryVehicleDataCoordinator(hass, entry, api, product)
vehicle = teslemetry.vehicles.create(vin)
coordinator = TeslemetryVehicleDataCoordinator(
hass, entry, vehicle, product
)
device = DeviceInfo(
identifiers={(DOMAIN, vin)},
manufacturer="Tesla",
configuration_url="https://teslemetry.com/console",
name=product["display_name"],
model=api.model,
model=vehicle.model,
serial_number=vin,
)
current_devices.add((DOMAIN, vin))
@@ -168,7 +170,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -
vehicles.append(
TeslemetryVehicleData(
api=api,
api=vehicle,
config_entry=entry,
coordinator=coordinator,
poll=poll,
@@ -194,7 +196,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -
)
continue
api = teslemetry.energySites.create(site_id)
energy_site = teslemetry.energySites.create(site_id)
device = DeviceInfo(
identifiers={(DOMAIN, str(site_id))},
manufacturer="Tesla",
@@ -210,7 +212,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -
# Check live status endpoint works before creating its coordinator
try:
live_status = (await api.live_status())["response"]
live_status = (await energy_site.live_status())["response"]
except (InvalidToken, Forbidden, SubscriptionRequired) as e:
raise ConfigEntryAuthFailed from e
except TeslaFleetError as e:
@@ -218,19 +220,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslemetryConfigEntry) -
energysites.append(
TeslemetryEnergyData(
api=api,
api=energy_site,
live_coordinator=(
TeslemetryEnergySiteLiveCoordinator(
hass, entry, api, live_status
hass, entry, energy_site, live_status
)
if isinstance(live_status, dict)
else None
),
info_coordinator=TeslemetryEnergySiteInfoCoordinator(
hass, entry, api, product
hass, entry, energy_site, product
),
history_coordinator=(
TeslemetryEnergyHistoryCoordinator(hass, entry, api)
TeslemetryEnergyHistoryCoordinator(hass, entry, energy_site)
if powerwall
else None
),
@@ -314,7 +316,7 @@ async def async_migrate_entry(
# Convert legacy access token to OAuth tokens using migrate endpoint
try:
data = await Teslemetry(session, access_token).migrate_to_oauth(
CLIENT_ID, access_token, hass.config.location_name
CLIENT_ID, hass.config.location_name
)
except (ClientError, TypeError) as e:
raise ConfigEntryAuthFailed from e

View File

@@ -7,7 +7,11 @@ from typing import TYPE_CHECKING, Any
from tesla_fleet_api.const import TeslaEnergyPeriod, VehicleDataEndpoint
from tesla_fleet_api.exceptions import (
GatewayTimeout,
InvalidResponse,
InvalidToken,
RateLimited,
ServiceUnavailable,
SubscriptionRequired,
TeslaFleetError,
)
@@ -23,6 +27,22 @@ if TYPE_CHECKING:
from .const import ENERGY_HISTORY_FIELDS, LOGGER
from .helpers import flatten
RETRY_EXCEPTIONS = (
InvalidResponse,
RateLimited,
ServiceUnavailable,
GatewayTimeout,
)
def _get_retry_after(e: TeslaFleetError) -> float:
"""Calculate wait time from exception."""
if isinstance(e.data, dict):
if after := e.data.get("after"):
return float(after)
return 10.0
VEHICLE_INTERVAL = timedelta(seconds=60)
VEHICLE_WAIT = timedelta(minutes=15)
ENERGY_LIVE_INTERVAL = timedelta(seconds=30)
@@ -69,14 +89,14 @@ class TeslemetryVehicleDataCoordinator(DataUpdateCoordinator[dict[str, Any]]):
async def _async_update_data(self) -> dict[str, Any]:
"""Update vehicle data using Teslemetry API."""
try:
data = (await self.api.vehicle_data(endpoints=ENDPOINTS))["response"]
except (InvalidToken, SubscriptionRequired) as e:
raise ConfigEntryAuthFailed from e
except RETRY_EXCEPTIONS as e:
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
except TeslaFleetError as e:
raise UpdateFailed(e.message) from e
return flatten(data)
@@ -111,19 +131,18 @@ class TeslemetryEnergySiteLiveCoordinator(DataUpdateCoordinator[dict[str, Any]])
async def _async_update_data(self) -> dict[str, Any]:
"""Update energy site data using Teslemetry API."""
try:
data = (await self.api.live_status())["response"]
except (InvalidToken, SubscriptionRequired) as e:
raise ConfigEntryAuthFailed from e
except RETRY_EXCEPTIONS as e:
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
except TeslaFleetError as e:
raise UpdateFailed(e.message) from e
# Convert Wall Connectors from array to dict
data["wall_connectors"] = {
wc["din"]: wc for wc in (data.get("wall_connectors") or [])
}
return data
@@ -152,14 +171,14 @@ class TeslemetryEnergySiteInfoCoordinator(DataUpdateCoordinator[dict[str, Any]])
async def _async_update_data(self) -> dict[str, Any]:
"""Update energy site data using Teslemetry API."""
try:
data = (await self.api.site_info())["response"]
except (InvalidToken, SubscriptionRequired) as e:
raise ConfigEntryAuthFailed from e
except RETRY_EXCEPTIONS as e:
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
except TeslaFleetError as e:
raise UpdateFailed(e.message) from e
return flatten(data)
@@ -187,11 +206,12 @@ class TeslemetryEnergyHistoryCoordinator(DataUpdateCoordinator[dict[str, Any]]):
async def _async_update_data(self) -> dict[str, Any]:
"""Update energy site data using Teslemetry API."""
try:
data = (await self.api.energy_history(TeslaEnergyPeriod.DAY))["response"]
except (InvalidToken, SubscriptionRequired) as e:
raise ConfigEntryAuthFailed from e
except RETRY_EXCEPTIONS as e:
raise UpdateFailed(e.message, retry_after=_get_retry_after(e)) from e
except TeslaFleetError as e:
raise UpdateFailed(e.message) from e
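
The retry logic hinges on _get_retry_after turning whatever payload the exception carries into a wait time, falling back to a fixed delay. A standalone restatement of that fallback behaviour, with plain dictionaries standing in for TeslaFleetError.data (sketch only):

def retry_after(data: object) -> float:
    """Mirror of _get_retry_after above: use the server hint if present, else 10 s."""
    if isinstance(data, dict) and (after := data.get("after")):
        return float(after)
    return 10.0

assert retry_after({"after": "30"}) == 30.0    # server-provided backoff
assert retry_after({"error": "busy"}) == 10.0  # dict without a hint: default
assert retry_after("rate limited") == 10.0     # non-dict payloads also default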

View File

@@ -8,5 +8,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["tesla-fleet-api"],
"requirements": ["tesla-fleet-api==1.3.2", "teslemetry-stream==0.9.0"]
"requirements": ["tesla-fleet-api==1.4.2", "teslemetry-stream==0.9.0"]
}

View File

@@ -149,7 +149,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
config = async_get_config_for_device(hass, device)
vehicle = async_get_vehicle_for_entry(hass, device, config)
time: int | None = None
time: int
# Convert time to minutes since midnight
if "time" in call.data:
(hours, minutes, *_seconds) = call.data["time"].split(":")
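
The arithmetic that follows the split is cut off by the hunk; under the obvious reading of the comment ("minutes since midnight"), the conversion amounts to the following small sketch (function name and the exact expression are assumptions, not quoted from the change):

def minutes_since_midnight(value: str) -> int:
    """Convert an "HH:MM[:SS]" string to minutes since midnight."""
    hours, minutes, *_seconds = value.split(":")
    return int(hours) * 60 + int(minutes)

assert minutes_since_midnight("07:30") == 450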
@@ -158,6 +158,8 @@ def async_setup_services(hass: HomeAssistant) -> None:
raise ServiceValidationError(
translation_domain=DOMAIN, translation_key="set_scheduled_charging_time"
)
else:
time = 0
await handle_vehicle_command(
vehicle.api.set_scheduled_charging(enable=call.data["enable"], time=time)
@@ -198,6 +200,8 @@ def async_setup_services(hass: HomeAssistant) -> None:
translation_domain=DOMAIN,
translation_key="set_scheduled_departure_preconditioning",
)
else:
departure_time = 0
# Off peak charging
off_peak_charging_enabled = call.data.get(ATTR_OFF_PEAK_CHARGING_ENABLED, False)
@@ -214,6 +218,8 @@ def async_setup_services(hass: HomeAssistant) -> None:
translation_domain=DOMAIN,
translation_key="set_scheduled_departure_off_peak",
)
else:
end_off_peak_time = 0
await handle_vehicle_command(
vehicle.api.set_scheduled_departure(
@@ -252,9 +258,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
vehicle = async_get_vehicle_for_entry(hass, device, config)
await handle_vehicle_command(
vehicle.api.set_valet_mode(
call.data.get("enable"), call.data.get("pin", "")
)
vehicle.api.set_valet_mode(call.data["enable"], call.data["pin"])
)
hass.services.async_register(
@@ -276,14 +280,14 @@ def async_setup_services(hass: HomeAssistant) -> None:
config = async_get_config_for_device(hass, device)
vehicle = async_get_vehicle_for_entry(hass, device, config)
enable = call.data.get("enable")
enable = call.data["enable"]
if enable is True:
await handle_vehicle_command(
vehicle.api.speed_limit_activate(call.data.get("pin"))
vehicle.api.speed_limit_activate(call.data["pin"])
)
elif enable is False:
await handle_vehicle_command(
vehicle.api.speed_limit_deactivate(call.data.get("pin"))
vehicle.api.speed_limit_deactivate(call.data["pin"])
)
hass.services.async_register(
@@ -306,7 +310,7 @@ def async_setup_services(hass: HomeAssistant) -> None:
site = async_get_energy_site_for_entry(hass, device, config)
resp = await handle_command(
site.api.time_of_use_settings(call.data.get(ATTR_TOU_SETTINGS))
site.api.time_of_use_settings(call.data[ATTR_TOU_SETTINGS])
)
if "error" in resp:
raise HomeAssistantError(

View File

@@ -1127,6 +1127,15 @@
"no_vehicle_data_for_device": {
"message": "No vehicle data for device ID: {device_id}"
},
"set_scheduled_charging_time": {
"message": "Scheduled charging time is required when enabling"
},
"set_scheduled_departure_off_peak": {
"message": "Off-peak charging end time is required when enabling"
},
"set_scheduled_departure_preconditioning": {
"message": "Preconditioning departure time is required when enabling"
},
"wake_up_failed": {
"message": "Failed to wake up vehicle: {message}"
},

View File

@@ -7,5 +7,5 @@
"integration_type": "hub",
"iot_class": "cloud_polling",
"loggers": ["tessie", "tesla-fleet-api"],
"requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.3.2"]
"requirements": ["tessie-api==0.1.1", "tesla-fleet-api==1.4.2"]
}

View File

@@ -33,7 +33,7 @@ from .const import (
from .coordinator import TibberDataAPICoordinator
from .services import async_setup_services
PLATFORMS = [Platform.NOTIFY, Platform.SENSOR]
PLATFORMS = [Platform.BINARY_SENSOR, Platform.NOTIFY, Platform.SENSOR]
CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN)

View File

@@ -0,0 +1,123 @@
"""Support for Tibber binary sensors."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
import logging
import tibber
from tibber.data_api import TibberDevice
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, TibberConfigEntry
from .coordinator import TibberDataAPICoordinator
_LOGGER = logging.getLogger(__name__)
@dataclass(frozen=True, kw_only=True)
class TibberBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Describes Tibber binary sensor entity."""
is_on_fn: Callable[[str], bool | None]
DATA_API_BINARY_SENSORS: tuple[TibberBinarySensorEntityDescription, ...] = (
TibberBinarySensorEntityDescription(
key="connector.status",
device_class=BinarySensorDeviceClass.PLUG,
is_on_fn={"connected": True, "disconnected": False}.get,
),
TibberBinarySensorEntityDescription(
key="charging.status",
device_class=BinarySensorDeviceClass.BATTERY_CHARGING,
is_on_fn={"charging": True, "idle": False}.get,
),
TibberBinarySensorEntityDescription(
key="onOff",
device_class=BinarySensorDeviceClass.POWER,
is_on_fn={"on": True, "off": False}.get,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: TibberConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the Tibber binary sensors."""
coordinator = entry.runtime_data.data_api_coordinator
assert coordinator is not None
entities: list[TibberDataAPIBinarySensor] = []
api_binary_sensors = {sensor.key: sensor for sensor in DATA_API_BINARY_SENSORS}
for device in coordinator.data.values():
for sensor in device.sensors:
description: TibberBinarySensorEntityDescription | None = (
api_binary_sensors.get(sensor.id)
)
if description is None:
continue
entities.append(TibberDataAPIBinarySensor(coordinator, device, description))
async_add_entities(entities)
class TibberDataAPIBinarySensor(
CoordinatorEntity[TibberDataAPICoordinator], BinarySensorEntity
):
"""Representation of a Tibber Data API binary sensor."""
_attr_has_entity_name = True
entity_description: TibberBinarySensorEntityDescription
def __init__(
self,
coordinator: TibberDataAPICoordinator,
device: TibberDevice,
entity_description: TibberBinarySensorEntityDescription,
) -> None:
"""Initialize the binary sensor."""
super().__init__(coordinator)
self._device_id: str = device.id
self.entity_description = entity_description
self._attr_unique_id = f"{device.id}_{entity_description.key}"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, device.external_id)},
name=device.name,
manufacturer=device.brand,
model=device.model,
)
@property
def available(self) -> bool:
"""Return if entity is available."""
return (
super().available and self._device_id in self.coordinator.sensors_by_device
)
@property
def device(self) -> dict[str, tibber.data_api.Sensor]:
"""Return the device sensors."""
return self.coordinator.sensors_by_device[self._device_id]
@property
def is_on(self) -> bool | None:
"""Return the state of the binary sensor."""
return self.entity_description.is_on_fn(
str(self.device[self.entity_description.key].value)
)
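
The dict.get bound as is_on_fn above maps raw status strings onto the three binary-sensor states; any unexpected value falls through to None, which surfaces as "unknown". A tiny sketch of that behaviour using the connector mapping from the hunk:

connector_is_on = {"connected": True, "disconnected": False}.get

assert connector_is_on("connected") is True
assert connector_is_on("disconnected") is False
assert connector_is_on("charging") is None  # unmapped value -> unknown state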

View File

@@ -34,7 +34,7 @@ from homeassistant.const import (
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
@@ -351,7 +351,6 @@ async def _async_setup_graphql_sensors(
tibber_connection = entry.runtime_data.tibber_connection
entity_registry = er.async_get(hass)
device_registry = dr.async_get(hass)
coordinator: TibberDataCoordinator | None = None
entities: list[TibberSensor] = []
@@ -391,25 +390,6 @@ async def _async_setup_graphql_sensors(
).async_set_updated_data
)
# migrate
old_id = home.info["viewer"]["home"]["meteringPointData"]["consumptionEan"]
if old_id is None:
continue
# migrate to new device ids
old_entity_id = entity_registry.async_get_entity_id("sensor", DOMAIN, old_id)
if old_entity_id is not None:
entity_registry.async_update_entity(
old_entity_id, new_unique_id=home.home_id
)
# migrate to new device ids
device_entry = device_registry.async_get_device(identifiers={(DOMAIN, old_id)})
if device_entry and entry.entry_id in device_entry.config_entries:
device_registry.async_update_device(
device_entry.id, new_identifiers={(DOMAIN, home.home_id)}
)
async_add_entities(entities)
@@ -430,9 +410,6 @@ def _setup_data_api_sensors(
for sensor in device.sensors:
description: SensorEntityDescription | None = api_sensors.get(sensor.id)
if description is None:
_LOGGER.debug(
"Sensor %s not found in DATA_API_SENSORS, skipping", sensor
)
continue
entities.append(TibberDataAPISensor(coordinator, device, description))
async_add_entities(entities)

View File

@@ -38,6 +38,7 @@ from .models import (
DPCodeEnumWrapper,
DPCodeIntegerWrapper,
)
from .type_information import EnumTypeInformation
TUYA_HVAC_TO_HA = {
"auto": HVACMode.HEAT_COOL,
@@ -139,6 +140,58 @@ class _SwingModeWrapper(DeviceWrapper):
return commands
class _HvacModeWrapper(DPCodeEnumWrapper):
"""Wrapper for managing climate HVACMode."""
# Modes that do not map to HVAC modes are ignored (they are handled by PresetWrapper)
def __init__(self, dpcode: str, type_information: EnumTypeInformation) -> None:
"""Init _HvacModeWrapper."""
super().__init__(dpcode, type_information)
self.options = [
TUYA_HVAC_TO_HA[tuya_mode]
for tuya_mode in type_information.range
if tuya_mode in TUYA_HVAC_TO_HA
]
def read_device_status(self, device: CustomerDevice) -> HVACMode | None:
"""Read the device status."""
if (raw := super().read_device_status(device)) not in TUYA_HVAC_TO_HA:
return None
return TUYA_HVAC_TO_HA[raw]
def _convert_value_to_raw_value(
self, device: CustomerDevice, value: HVACMode
) -> Any:
"""Convert value to raw value."""
return next(
tuya_mode
for tuya_mode, ha_mode in TUYA_HVAC_TO_HA.items()
if ha_mode == value
)
class _PresetWrapper(DPCodeEnumWrapper):
"""Wrapper for managing climate preset modes."""
# Modes that map to HVAC modes are ignored (they are handled by HVACModeWrapper)
def __init__(self, dpcode: str, type_information: EnumTypeInformation) -> None:
"""Init _PresetWrapper."""
super().__init__(dpcode, type_information)
self.options = [
tuya_mode
for tuya_mode in type_information.range
if tuya_mode not in TUYA_HVAC_TO_HA
]
def read_device_status(self, device: CustomerDevice) -> str | None:
"""Read the device status."""
if (raw := super().read_device_status(device)) in TUYA_HVAC_TO_HA:
return None
return raw
@dataclass(frozen=True, kw_only=True)
class TuyaClimateEntityDescription(ClimateEntityDescription):
"""Describe an Tuya climate entity."""
@@ -296,7 +349,10 @@ async def async_setup_entry(
(DPCode.FAN_SPEED_ENUM, DPCode.LEVEL, DPCode.WINDSPEED),
prefer_function=True,
),
hvac_mode_wrapper=DPCodeEnumWrapper.find_dpcode(
hvac_mode_wrapper=_HvacModeWrapper.find_dpcode(
device, DPCode.MODE, prefer_function=True
),
preset_wrapper=_PresetWrapper.find_dpcode(
device, DPCode.MODE, prefer_function=True
),
set_temperature_wrapper=temperature_wrappers[1],
@@ -322,7 +378,6 @@ async def async_setup_entry(
class TuyaClimateEntity(TuyaEntity, ClimateEntity):
"""Tuya Climate Device."""
_hvac_to_tuya: dict[str, str]
entity_description: TuyaClimateEntityDescription
_attr_name = None
@@ -335,7 +390,8 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
current_humidity_wrapper: DeviceWrapper[int] | None,
current_temperature_wrapper: DeviceWrapper[float] | None,
fan_mode_wrapper: DeviceWrapper[str] | None,
hvac_mode_wrapper: DeviceWrapper[str] | None,
hvac_mode_wrapper: DeviceWrapper[HVACMode] | None,
preset_wrapper: DeviceWrapper[str] | None,
set_temperature_wrapper: DeviceWrapper[float] | None,
swing_wrapper: DeviceWrapper[str] | None,
switch_wrapper: DeviceWrapper[bool] | None,
@@ -351,6 +407,7 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
self._current_temperature = current_temperature_wrapper
self._fan_mode_wrapper = fan_mode_wrapper
self._hvac_mode_wrapper = hvac_mode_wrapper
self._preset_wrapper = preset_wrapper
self._set_temperature = set_temperature_wrapper
self._swing_wrapper = swing_wrapper
self._switch_wrapper = switch_wrapper
@@ -366,29 +423,24 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
self._attr_target_temperature_step = set_temperature_wrapper.value_step
# Determine HVAC modes
self._attr_hvac_modes: list[HVACMode] = []
self._hvac_to_tuya = {}
self._attr_hvac_modes = []
if hvac_mode_wrapper:
self._attr_hvac_modes = [HVACMode.OFF]
unknown_hvac_modes: list[str] = []
for tuya_mode in hvac_mode_wrapper.options:
if tuya_mode in TUYA_HVAC_TO_HA:
ha_mode = TUYA_HVAC_TO_HA[tuya_mode]
self._hvac_to_tuya[ha_mode] = tuya_mode
self._attr_hvac_modes.append(ha_mode)
else:
unknown_hvac_modes.append(tuya_mode)
for mode in hvac_mode_wrapper.options:
self._attr_hvac_modes.append(HVACMode(mode))
if unknown_hvac_modes: # Tuya modes are presets instead of hvac_modes
self._attr_hvac_modes.append(description.switch_only_hvac_mode)
self._attr_preset_modes = unknown_hvac_modes
self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE
elif switch_wrapper:
self._attr_hvac_modes = [
HVACMode.OFF,
description.switch_only_hvac_mode,
]
# Determine preset modes (ignore if empty options)
if preset_wrapper and preset_wrapper.options:
self._attr_hvac_modes.append(description.switch_only_hvac_mode)
self._attr_preset_modes = preset_wrapper.options
self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE
# Determine dpcode to use for setting the humidity
if target_humidity_wrapper:
self._attr_supported_features |= ClimateEntityFeature.TARGET_HUMIDITY
@@ -419,17 +471,15 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
self.device, hvac_mode != HVACMode.OFF
)
)
if self._hvac_mode_wrapper and hvac_mode in self._hvac_to_tuya:
if self._hvac_mode_wrapper and hvac_mode in self._hvac_mode_wrapper.options:
commands.extend(
self._hvac_mode_wrapper.get_update_commands(
self.device, self._hvac_to_tuya[hvac_mode]
)
self._hvac_mode_wrapper.get_update_commands(self.device, hvac_mode)
)
await self._async_send_commands(commands)
async def async_set_preset_mode(self, preset_mode: str) -> None:
"""Set new target preset mode."""
await self._async_send_wrapper_updates(self._hvac_mode_wrapper, preset_mode)
await self._async_send_wrapper_updates(self._preset_wrapper, preset_mode)
async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
@@ -484,21 +534,12 @@ class TuyaClimateEntity(TuyaEntity, ClimateEntity):
return None
# If we do have a mode wrapper, check if the mode maps to an HVAC mode.
if (hvac_status := self._read_wrapper(self._hvac_mode_wrapper)) is None:
return None
return TUYA_HVAC_TO_HA.get(hvac_status)
return self._read_wrapper(self._hvac_mode_wrapper)
@property
def preset_mode(self) -> str | None:
"""Return preset mode."""
if self._hvac_mode_wrapper is None:
return None
mode = self._read_wrapper(self._hvac_mode_wrapper)
if mode in TUYA_HVAC_TO_HA:
return None
return mode
return self._read_wrapper(self._preset_wrapper)
@property
def fan_mode(self) -> str | None:
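
The two new wrappers split a single Tuya "mode" enum between HVAC modes and presets: values with a TUYA_HVAC_TO_HA mapping become HVAC modes, everything else becomes a preset. A self-contained sketch of that partition — only the "auto" mapping is taken from the hunk above; the example range, the "heat_cool" string stand-in for HVACMode, and the other values are illustrative:

TUYA_HVAC_TO_HA = {"auto": "heat_cool"}  # stand-in for the real HVACMode mapping
mode_range = ["auto", "sleep", "eco"]    # hypothetical enum range reported by a device

hvac_options = [TUYA_HVAC_TO_HA[m] for m in mode_range if m in TUYA_HVAC_TO_HA]
preset_options = [m for m in mode_range if m not in TUYA_HVAC_TO_HA]

assert hvac_options == ["heat_cool"]     # handled by _HvacModeWrapper
assert preset_options == ["sleep", "eco"]  # handled by _PresetWrapper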

View File

@@ -41,7 +41,7 @@
"iot_class": "local_push",
"loggers": ["uiprotect", "unifi_discovery"],
"quality_scale": "platinum",
"requirements": ["uiprotect==8.0.0", "unifi-discovery==1.2.0"],
"requirements": ["uiprotect==8.1.1", "unifi-discovery==1.2.0"],
"ssdp": [
{
"manufacturer": "Ubiquiti Networks",

Some files were not shown because too many files have changed in this diff.