Mirror of https://github.com/home-assistant/core.git (synced 2026-01-14 11:08:13 +00:00)
Compare commits
37 Commits
light_cond...mqtt-entit
| Author | SHA1 | Date |
|---|---|---|
|  | 41f3591fbc |  |
|  | c3e4676b4c |  |
|  | f852220282 |  |
|  | 5dd3bf04eb |  |
|  | b0c2fdc57b |  |
|  | 617d44ffcf |  |
|  | 8fb8eed1c8 |  |
|  | 1ddbd4755b |  |
|  | 3bd76294dc |  |
|  | bb97822db9 |  |
|  | 33ffccabd1 |  |
|  | 56de03ce33 |  |
|  | 0cbf7002a8 |  |
|  | cffceffe04 |  |
|  | 253189805e |  |
|  | 2e91725ac0 |  |
|  | 3b54dddc08 |  |
|  | 9bc3d83a55 |  |
|  | d62a554cbf |  |
|  | f071b7cd46 |  |
|  | 37f34f6189 |  |
|  | 27dc5b6d18 |  |
|  | 0bbc2f49a6 |  |
|  | c121fa25e8 |  |
|  | 660cea8b65 |  |
|  | c7749ebae1 |  |
|  | a2acb744b3 |  |
|  | 0d9158689d |  |
|  | f85e8d6c1f |  |
|  | 9be4cc5af1 |  |
|  | a141eedf2c |  |
|  | 03040c131c |  |
|  | 3eef50632c |  |
|  | eff150cd54 |  |
|  | 6dcc94b0a1 |  |
|  | 7201903877 |  |
|  | 5b776307ea |  |
@@ -40,8 +40,7 @@
  "python.terminal.activateEnvInCurrentTerminal": true,
  "python.testing.pytestArgs": ["--no-cov"],
  "pylint.importStrategy": "fromEnvironment",
  // Pyright type checking is not compatible with mypy which Home Assistant uses for type checking
  "python.analysis.typeCheckingMode": "off",
  "python.analysis.typeCheckingMode": "basic",
  "editor.formatOnPaste": false,
  "editor.formatOnSave": true,
  "editor.formatOnType": true,
4 changes: .github/copilot-instructions.md (vendored)
@@ -847,8 +847,8 @@ rules:
## Development Commands

### Code Quality & Linting

- **Run all linters on all files**: `prek run --all-files`
- **Run linters on staged files only**: `prek run`
- **Run all linters on all files**: `pre-commit run --all-files`
- **Run linters on staged files only**: `pre-commit run`
- **PyLint on everything** (slow): `pylint homeassistant`
- **PyLint on specific folder**: `pylint homeassistant/components/my_integration`
- **MyPy type checking (whole project)**: `mypy homeassistant/`
179 changes: .github/workflows/ci.yaml (vendored)
@@ -59,6 +59,7 @@ env:
  # 15 is the latest version
  # - 15.2 is the latest (as of 9 Feb 2023)
  POSTGRESQL_VERSIONS: "['postgres:12.14','postgres:15.2']"
  PRE_COMMIT_CACHE: ~/.cache/pre-commit
  UV_CACHE_DIR: /tmp/uv-cache
  APT_CACHE_BASE: /home/runner/work/apt
  APT_CACHE_DIR: /home/runner/work/apt/cache
@@ -82,6 +83,7 @@ jobs:
      integrations_glob: ${{ steps.info.outputs.integrations_glob }}
      integrations: ${{ steps.integrations.outputs.changes }}
      apt_cache_key: ${{ steps.generate_apt_cache_key.outputs.key }}
      pre-commit_cache_key: ${{ steps.generate_pre-commit_cache_key.outputs.key }}
      python_cache_key: ${{ steps.generate_python_cache_key.outputs.key }}
      requirements: ${{ steps.core.outputs.requirements }}
      mariadb_groups: ${{ steps.info.outputs.mariadb_groups }}
@@ -109,6 +111,11 @@ jobs:
          hashFiles('requirements_all.txt') }}-${{
          hashFiles('homeassistant/package_constraints.txt') }}-${{
          hashFiles('script/gen_requirements_all.py') }}" >> $GITHUB_OUTPUT
      - name: Generate partial pre-commit restore key
        id: generate_pre-commit_cache_key
        run: >-
          echo "key=pre-commit-${{ env.CACHE_VERSION }}-${{
          hashFiles('.pre-commit-config.yaml') }}" >> $GITHUB_OUTPUT
      - name: Generate partial apt restore key
        id: generate_apt_cache_key
        run: |
@@ -237,8 +244,8 @@
          echo "skip_coverage: ${skip_coverage}"
          echo "skip_coverage=${skip_coverage}" >> $GITHUB_OUTPUT

  prek:
    name: Run prek checks
  pre-commit:
    name: Prepare pre-commit base
    runs-on: *runs-on-ubuntu
    needs: [info]
    if: |
@@ -247,23 +254,147 @@ jobs:
      && github.event.inputs.audit-licenses-only != 'true'
    steps:
      - *checkout
      - &setup-python-default
        name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - name: Restore base Python virtual environment
        id: cache-venv
        uses: &actions-cache actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        with:
          path: venv
          key: &key-pre-commit-venv >-
            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-venv-${{
            needs.info.outputs.pre-commit_cache_key }}
      - name: Create Python virtual environment
        if: steps.cache-venv.outputs.cache-hit != 'true'
        run: |
          python -m venv venv
          . venv/bin/activate
          python --version
          pip install "$(grep '^uv' < requirements.txt)"
          uv pip install "$(cat requirements_test.txt | grep pre-commit)"
      - name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: *actions-cache
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          lookup-only: true
          key: &key-pre-commit-env >-
            ${{ runner.os }}-${{ runner.arch }}-${{ steps.python.outputs.python-version }}-${{
            needs.info.outputs.pre-commit_cache_key }}
      - name: Install pre-commit dependencies
        if: steps.cache-precommit.outputs.cache-hit != 'true'
        run: |
          . venv/bin/activate
          pre-commit install-hooks

  lint-ruff-format:
    name: Check ruff-format
    runs-on: *runs-on-ubuntu
    needs: &needs-pre-commit
      - info
      - pre-commit
    steps:
      - *checkout
      - *setup-python-default
      - &cache-restore-pre-commit-venv
        name: Restore base Python virtual environment
        id: cache-venv
        uses: &actions-cache-restore actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        with:
          path: venv
          fail-on-cache-miss: true
          key: *key-pre-commit-venv
      - &cache-restore-pre-commit-env
        name: Restore pre-commit environment from cache
        id: cache-precommit
        uses: *actions-cache-restore
        with:
          path: ${{ env.PRE_COMMIT_CACHE }}
          fail-on-cache-miss: true
          key: *key-pre-commit-env
      - name: Run ruff-format
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual ruff-format --all-files --show-diff-on-failure
        env:
          RUFF_OUTPUT_FORMAT: github

  lint-ruff:
    name: Check ruff
    runs-on: *runs-on-ubuntu
    needs: *needs-pre-commit
    steps:
      - *checkout
      - *setup-python-default
      - *cache-restore-pre-commit-venv
      - *cache-restore-pre-commit-env
      - name: Run ruff
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual ruff-check --all-files --show-diff-on-failure
        env:
          RUFF_OUTPUT_FORMAT: github

  lint-other:
    name: Check other linters
    runs-on: *runs-on-ubuntu
    needs: *needs-pre-commit
    steps:
      - *checkout
      - *setup-python-default
      - *cache-restore-pre-commit-venv
      - *cache-restore-pre-commit-env

      - name: Register yamllint problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/yamllint.json"
      - name: Run yamllint
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual yamllint --all-files --show-diff-on-failure

      - name: Register check-json problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-json.json"
      - name: Run check-json
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-json --all-files --show-diff-on-failure

      - name: Run prettier (fully)
        if: needs.info.outputs.test_full_suite == 'true'
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual prettier --all-files --show-diff-on-failure

      - name: Run prettier (partially)
        if: needs.info.outputs.test_full_suite == 'false'
        shell: bash
        run: |
          . venv/bin/activate
          shopt -s globstar
          pre-commit run --hook-stage manual prettier --show-diff-on-failure --files {homeassistant,tests}/components/${{ needs.info.outputs.integrations_glob }}/{*,**/*}

      - name: Register check executables problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/check-executables-have-shebangs.json"
      - name: Run executables check
        run: |
          . venv/bin/activate
          pre-commit run --hook-stage manual check-executables-have-shebangs --all-files --show-diff-on-failure

      - name: Register codespell problem matcher
        run: |
          echo "::add-matcher::.github/workflows/matchers/codespell.json"
      - name: Run prek
        uses: j178/prek-action@91fd7d7cf70ae1dee9f4f44e7dfa5d1073fe6623 # v1.0.11
        env:
          PREK_SKIP: no-commit-to-branch,mypy,pylint,gen_requirements_all,hassfest,hassfest-metadata,hassfest-mypy-config
          RUFF_OUTPUT_FORMAT: github
      - name: Run codespell
        run: |
          . venv/bin/activate
          pre-commit run --show-diff-on-failure --hook-stage manual codespell --all-files

  lint-hadolint:
    name: Check ${{ matrix.file }}
@@ -303,7 +434,7 @@ jobs:
      - &setup-python-matrix
        name: Set up Python ${{ matrix.python-version }}
        id: python
        uses: &actions-setup-python actions/setup-python@83679a892e2d95755f2dac6acb0bfd1e9ac5d548 # v6.1.0
        uses: *actions-setup-python
        with:
          python-version: ${{ matrix.python-version }}
          check-latest: true
@@ -316,7 +447,7 @@ jobs:
          env.HA_SHORT_VERSION }}-$(date -u '+%Y-%m-%dT%H:%M:%s')" >> $GITHUB_OUTPUT
      - name: Restore base Python virtual environment
        id: cache-venv
        uses: &actions-cache actions/cache@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        uses: *actions-cache
        with:
          path: venv
          key: &key-python-venv >-
@@ -431,7 +562,7 @@ jobs:
    steps:
      - &cache-restore-apt
        name: Restore apt cache
        uses: &actions-cache-restore actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
        uses: *actions-cache-restore
        with:
          path: *path-apt-cache
          fail-on-cache-miss: true
@@ -448,13 +579,7 @@ jobs:
            -o Dir::State::Lists=${{ env.APT_LIST_CACHE_DIR }} \
            libturbojpeg
      - *checkout
      - &setup-python-default
        name: Set up Python ${{ env.DEFAULT_PYTHON }}
        id: python
        uses: *actions-setup-python
        with:
          python-version: ${{ env.DEFAULT_PYTHON }}
          check-latest: true
      - *setup-python-default
      - &cache-restore-python-default
        name: Restore full Python ${{ env.DEFAULT_PYTHON }} virtual environment
        id: cache-venv
@@ -657,7 +782,9 @@ jobs:
      - base
      - gen-requirements-all
      - hassfest
      - prek
      - lint-other
      - lint-ruff
      - lint-ruff-format
      - mypy
    steps:
      - *cache-restore-apt
@@ -696,7 +823,9 @@ jobs:
      - base
      - gen-requirements-all
      - hassfest
      - prek
      - lint-other
      - lint-ruff
      - lint-ruff-format
      - mypy
      - prepare-pytest-full
    if: |
@@ -820,7 +949,9 @@ jobs:
      - base
      - gen-requirements-all
      - hassfest
      - prek
      - lint-other
      - lint-ruff
      - lint-ruff-format
      - mypy
    if: |
      needs.info.outputs.lint_only != 'true'
@@ -935,7 +1066,9 @@ jobs:
      - base
      - gen-requirements-all
      - hassfest
      - prek
      - lint-other
      - lint-ruff
      - lint-ruff-format
      - mypy
    if: |
      needs.info.outputs.lint_only != 'true'
@@ -1069,7 +1202,9 @@ jobs:
      - base
      - gen-requirements-all
      - hassfest
      - prek
      - lint-other
      - lint-ruff
      - lint-ruff-format
      - mypy
    if: |
      needs.info.outputs.lint_only != 'true'
4 changes: .github/workflows/codeql.yml (vendored)
@@ -24,11 +24,11 @@ jobs:
        uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1

      - name: Initialize CodeQL
        uses: github/codeql-action/init@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
        uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
        with:
          languages: python

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@cdefb33c0f6224e58673d9004f47f7cb3e328b89 # v4.31.10
        uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
        with:
          category: "/language:python"
@@ -39,14 +39,14 @@ repos:
    - id: prettier
      additional_dependencies:
        - prettier@3.6.2
        - prettier-plugin-sort-json@4.2.0
        - prettier-plugin-sort-json@4.1.1
  - repo: https://github.com/cdce8p/python-typing-update
    rev: v0.6.0
    hooks:
      # Run `python-typing-update` hook manually from time to time
      # to update python typing syntax.
      # Will require manual work, before submitting changes!
      # prek run --hook-stage manual python-typing-update --all-files
      # pre-commit run --hook-stage manual python-typing-update --all-files
      - id: python-typing-update
        stages: [manual]
        args:
@@ -407,7 +407,6 @@ homeassistant.components.person.*
homeassistant.components.pi_hole.*
homeassistant.components.ping.*
homeassistant.components.plugwise.*
homeassistant.components.pooldose.*
homeassistant.components.portainer.*
homeassistant.components.powerfox.*
homeassistant.components.powerwall.*
4 changes: .vscode/settings.default.jsonc (vendored)
@@ -7,8 +8,8 @@
  "python.testing.pytestEnabled": false,
  // https://code.visualstudio.com/docs/python/linting#_general-settings
  "pylint.importStrategy": "fromEnvironment",
  // Pyright type checking is not compatible with mypy which Home Assistant uses for type checking
  "python.analysis.typeCheckingMode": "off",
  // Pyright is too pedantic for Home Assistant
  "python.analysis.typeCheckingMode": "basic",
  "[python]": {
    "editor.defaultFormatter": "charliermarsh.ruff",
  },
6 changes: .vscode/tasks.json (vendored)
@@ -45,7 +45,7 @@
    {
      "label": "Ruff",
      "type": "shell",
      "command": "prek run ruff-check --all-files",
      "command": "pre-commit run ruff-check --all-files",
      "group": {
        "kind": "test",
        "isDefault": true
@@ -57,9 +57,9 @@
      "problemMatcher": []
    },
    {
      "label": "Prek",
      "label": "Pre-commit",
      "type": "shell",
      "command": "prek run --show-diff-on-failure",
      "command": "pre-commit run --show-diff-on-failure",
      "group": {
        "kind": "test",
        "isDefault": true
2 changes: CODEOWNERS (generated)
@@ -1068,8 +1068,6 @@ build.json @home-assistant/supervisor
/tests/components/myuplink/ @pajzo @astrandb
/homeassistant/components/nam/ @bieniu
/tests/components/nam/ @bieniu
/homeassistant/components/namecheapdns/ @tr4nt0r
/tests/components/namecheapdns/ @tr4nt0r
/homeassistant/components/nanoleaf/ @milanmeu @joostlek
/tests/components/nanoleaf/ @milanmeu @joostlek
/homeassistant/components/nasweb/ @nasWebio
@@ -85,22 +85,6 @@ class AirzoneSystemEntity(AirzoneEntity):
            value = system[key]
        return value

    async def _async_update_sys_params(self, params: dict[str, Any]) -> None:
        """Send system parameters to API."""
        _params = {
            API_SYSTEM_ID: self.system_id,
            **params,
        }
        _LOGGER.debug("update_sys_params=%s", _params)
        try:
            await self.coordinator.airzone.set_sys_parameters(_params)
        except AirzoneError as error:
            raise HomeAssistantError(
                f"Failed to set system {self.entity_id}: {error}"
            ) from error

        self.coordinator.async_set_updated_data(self.coordinator.airzone.data())


class AirzoneHotWaterEntity(AirzoneEntity):
    """Define an Airzone Hot Water entity."""
@@ -20,7 +20,6 @@ from aioairzone.const import (
    AZD_MODES,
    AZD_Q_ADAPT,
    AZD_SLEEP,
    AZD_SYSTEMS,
    AZD_ZONES,
)

@@ -31,7 +30,7 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import AirzoneConfigEntry, AirzoneUpdateCoordinator
from .entity import AirzoneEntity, AirzoneSystemEntity, AirzoneZoneEntity
from .entity import AirzoneEntity, AirzoneZoneEntity


@dataclass(frozen=True, kw_only=True)
@@ -86,18 +85,6 @@ def main_zone_options(
    return [k for k, v in options.items() if v in modes]


SYSTEM_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
    AirzoneSelectDescription(
        api_param=API_Q_ADAPT,
        entity_category=EntityCategory.CONFIG,
        key=AZD_Q_ADAPT,
        options=list(Q_ADAPT_DICT),
        options_dict=Q_ADAPT_DICT,
        translation_key="q_adapt",
    ),
)


MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
    AirzoneSelectDescription(
        api_param=API_MODE,
@@ -106,6 +93,14 @@ MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = (
        options_fn=main_zone_options,
        translation_key="modes",
    ),
    AirzoneSelectDescription(
        api_param=API_Q_ADAPT,
        entity_category=EntityCategory.CONFIG,
        key=AZD_Q_ADAPT,
        options=list(Q_ADAPT_DICT),
        options_dict=Q_ADAPT_DICT,
        translation_key="q_adapt",
    ),
)


@@ -145,37 +140,16 @@ async def async_setup_entry(
    """Add Airzone select from a config_entry."""
    coordinator = entry.runtime_data

    added_systems: set[str] = set()
    added_zones: set[str] = set()

    def _async_entity_listener() -> None:
        """Handle additions of select."""

        entities: list[AirzoneBaseSelect] = []

        systems_data = coordinator.data.get(AZD_SYSTEMS, {})
        received_systems = set(systems_data)
        new_systems = received_systems - added_systems
        if new_systems:
            entities.extend(
                AirzoneSystemSelect(
                    coordinator,
                    description,
                    entry,
                    system_id,
                    systems_data.get(system_id),
                )
                for system_id in new_systems
                for description in SYSTEM_SELECT_TYPES
                if description.key in systems_data.get(system_id)
            )
            added_systems.update(new_systems)

        zones_data = coordinator.data.get(AZD_ZONES, {})
        received_zones = set(zones_data)
        new_zones = received_zones - added_zones
        if new_zones:
            entities.extend(
    entities: list[AirzoneZoneSelect] = [
        AirzoneZoneSelect(
            coordinator,
            description,
@@ -187,8 +161,8 @@ async def async_setup_entry(
        for description in MAIN_ZONE_SELECT_TYPES
        if description.key in zones_data.get(system_zone_id)
        and zones_data.get(system_zone_id).get(AZD_MASTER) is True
            )
    ]
    entities += [
        AirzoneZoneSelect(
            coordinator,
            description,
@@ -199,11 +173,10 @@ async def async_setup_entry(
        for system_zone_id in new_zones
        for description in ZONE_SELECT_TYPES
        if description.key in zones_data.get(system_zone_id)
            )
    ]
    async_add_entities(entities)
            added_zones.update(new_zones)

        async_add_entities(entities)

    entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener))
    _async_entity_listener()

@@ -230,38 +203,6 @@ class AirzoneBaseSelect(AirzoneEntity, SelectEntity):
        self._attr_current_option = self._get_current_option()


class AirzoneSystemSelect(AirzoneSystemEntity, AirzoneBaseSelect):
    """Define an Airzone System select."""

    def __init__(
        self,
        coordinator: AirzoneUpdateCoordinator,
        description: AirzoneSelectDescription,
        entry: ConfigEntry,
        system_id: str,
        system_data: dict[str, Any],
    ) -> None:
        """Initialize."""
        super().__init__(coordinator, entry, system_data)

        self._attr_unique_id = f"{self._attr_unique_id}_{system_id}_{description.key}"
        self.entity_description = description

        self._attr_options = self.entity_description.options_fn(
            system_data, description.options_dict
        )

        self.values_dict = {v: k for k, v in description.options_dict.items()}

        self._async_update_attrs()

    async def async_select_option(self, option: str) -> None:
        """Change the selected option."""
        param = self.entity_description.api_param
        value = self.entity_description.options_dict[option]
        await self._async_update_sys_params({param: value})


class AirzoneZoneSelect(AirzoneZoneEntity, AirzoneBaseSelect):
    """Define an Airzone Zone select."""
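One side of the hunks above uses the "entity listener" pattern: a coordinator callback tracks which system/zone IDs it has already materialized in a set and only creates entities for IDs that appear later. A minimal, framework-free sketch of that idea; `FakeCoordinator` and the entity names are illustrative stand-ins, not the Home Assistant API:

```python
# Sketch of the dynamic-entity pattern in the diff above: a listener keeps
# a set of already-added IDs and only creates objects for new ones.

class FakeCoordinator:
    """Stands in for a DataUpdateCoordinator: holds data, notifies listeners."""

    def __init__(self) -> None:
        self.data: dict[str, dict] = {}
        self._listeners: list = []

    def async_add_listener(self, cb) -> None:
        self._listeners.append(cb)

    def set_data(self, data: dict[str, dict]) -> None:
        self.data = data
        for cb in self._listeners:
            cb()


def setup(coordinator: FakeCoordinator, add_entities) -> None:
    added: set[str] = set()

    def _listener() -> None:
        new_ids = set(coordinator.data) - added
        if new_ids:
            add_entities([f"select.zone_{zone_id}" for zone_id in sorted(new_ids)])
            added.update(new_ids)

    coordinator.async_add_listener(_listener)
    _listener()  # pick up zones already present at setup


coordinator = FakeCoordinator()
coordinator.data = {"1:1": {}}
setup(coordinator, print)                      # -> ['select.zone_1:1']
coordinator.set_data({"1:1": {}, "1:2": {}})   # -> ['select.zone_1:2']
```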
@@ -4,8 +4,6 @@ from __future__ import annotations

import logging

import dateutil

from homeassistant.components.automation import automations_with_entity
from homeassistant.components.script import scripts_with_entity
from homeassistant.components.sensor import (
@@ -181,7 +179,6 @@ SENSORS: dict[str, SensorEntityDescription] = {
    LAST_S_TEST: SensorEntityDescription(
        key=LAST_S_TEST,
        translation_key="last_self_test",
        device_class=SensorDeviceClass.TIMESTAMP,
    ),
    "lastxfer": SensorEntityDescription(
        key="lastxfer",
@@ -235,7 +232,6 @@ SENSORS: dict[str, SensorEntityDescription] = {
    "masterupd": SensorEntityDescription(
        key="masterupd",
        translation_key="master_update",
        device_class=SensorDeviceClass.TIMESTAMP,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    "maxlinev": SensorEntityDescription(
@@ -369,7 +365,6 @@ SENSORS: dict[str, SensorEntityDescription] = {
    "starttime": SensorEntityDescription(
        key="starttime",
        translation_key="startup_time",
        device_class=SensorDeviceClass.TIMESTAMP,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    "statflag": SensorEntityDescription(
@@ -421,19 +416,16 @@ SENSORS: dict[str, SensorEntityDescription] = {
    "xoffbat": SensorEntityDescription(
        key="xoffbat",
        translation_key="transfer_from_battery",
        device_class=SensorDeviceClass.TIMESTAMP,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    "xoffbatt": SensorEntityDescription(
        key="xoffbatt",
        translation_key="transfer_from_battery",
        device_class=SensorDeviceClass.TIMESTAMP,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    "xonbatt": SensorEntityDescription(
        key="xonbatt",
        translation_key="transfer_to_battery",
        device_class=SensorDeviceClass.TIMESTAMP,
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
}
@@ -537,13 +529,7 @@ class APCUPSdSensor(APCUPSdEntity, SensorEntity):
            self._attr_native_value = None
            return

        data = self.coordinator.data[key]

        if self.entity_description.device_class == SensorDeviceClass.TIMESTAMP:
            self._attr_native_value = dateutil.parser.parse(data)
            return

        self._attr_native_value, inferred_unit = infer_unit(data)
        self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key])
        if not self.native_unit_of_measurement:
            self._attr_native_unit_of_measurement = inferred_unit
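The longer branch above splits sensor values into two paths: TIMESTAMP-class values are parsed into datetimes with `dateutil`, and everything else goes through unit inference. A self-contained sketch of the two paths; `infer_unit` here is a toy stand-in for the integration's helper:

```python
# Timestamp-style statuses are parsed into datetimes; other readings have
# a trailing unit split off. Only dateutil.parser.parse is a real API.
from dateutil import parser


def infer_unit(value: str) -> tuple[str | float, str | None]:
    """Toy stand-in: split a trailing unit like 'Volts' off a reading."""
    head, _, tail = value.rpartition(" ")
    if head:
        try:
            return float(head), tail
        except ValueError:
            pass
    return value, None


print(parser.parse("2026-01-10 12:34:56 +0000"))  # datetime for TIMESTAMP sensors
print(infer_unit("230.0 Volts"))                  # (230.0, 'Volts') for the rest
```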
@@ -3,8 +3,9 @@
from abc import ABC, abstractmethod
from dataclasses import dataclass
import logging
import math

from pymicro_vad import MicroVad
from pysilero_vad import SileroVoiceActivityDetector
from pyspeex_noise import AudioProcessor

from .const import BYTES_PER_CHUNK
@@ -42,8 +43,8 @@ class AudioEnhancer(ABC):
        """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples."""


class MicroVadSpeexEnhancer(AudioEnhancer):
    """Audio enhancer that runs microVAD and speex."""
class SileroVadSpeexEnhancer(AudioEnhancer):
    """Audio enhancer that runs Silero VAD and speex."""

    def __init__(
        self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool
@@ -69,21 +70,49 @@ class MicroVadSpeexEnhancer(AudioEnhancer):
                self.noise_suppression,
            )

        self.vad: MicroVad | None = None
        self.vad: SileroVoiceActivityDetector | None = None

        # We get 10ms chunks but Silero works on 32ms chunks, so we have to
        # buffer audio. The previous speech probability is used until enough
        # audio has been buffered.
        self._vad_buffer: bytearray | None = None
        self._vad_buffer_chunks = 0
        self._vad_buffer_chunk_idx = 0
        self._last_speech_probability: float | None = None

        if self.is_vad_enabled:
            self.vad = MicroVad()
            _LOGGER.debug("Initialized microVAD")
            self.vad = SileroVoiceActivityDetector()

            # VAD buffer is a multiple of 10ms, but Silero VAD needs 32ms.
            self._vad_buffer_chunks = int(
                math.ceil(self.vad.chunk_bytes() / BYTES_PER_CHUNK)
            )
            self._vad_leftover_bytes = self.vad.chunk_bytes() - BYTES_PER_CHUNK
            self._vad_buffer = bytearray(self.vad.chunk_bytes())
            _LOGGER.debug("Initialized Silero VAD")

    def enhance_chunk(self, audio: bytes, timestamp_ms: int) -> EnhancedAudioChunk:
        """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples."""
        speech_probability: float | None = None

        assert len(audio) == BYTES_PER_CHUNK

        if self.vad is not None:
            # Run VAD
            speech_probability = self.vad.Process10ms(audio)
            assert self._vad_buffer is not None
            start_idx = self._vad_buffer_chunk_idx * BYTES_PER_CHUNK
            self._vad_buffer[start_idx : start_idx + BYTES_PER_CHUNK] = audio

            self._vad_buffer_chunk_idx += 1
            if self._vad_buffer_chunk_idx >= self._vad_buffer_chunks:
                # We have enough data to run Silero VAD (32 ms)
                self._last_speech_probability = self.vad.process_chunk(
                    self._vad_buffer[: self.vad.chunk_bytes()]
                )

                # Copy leftover audio that wasn't processed to start
                self._vad_buffer[: self._vad_leftover_bytes] = self._vad_buffer[
                    -self._vad_leftover_bytes :
                ]
                self._vad_buffer_chunk_idx = 0

        if self.audio_processor is not None:
            # Run noise suppression and auto gain
@@ -92,5 +121,5 @@ class MicroVadSpeexEnhancer(AudioEnhancer):
        return EnhancedAudioChunk(
            audio=audio,
            timestamp_ms=timestamp_ms,
            speech_probability=speech_probability,
            speech_probability=self._last_speech_probability,
        )
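The core idea in the Silero branch above is rebuffering: audio arrives in 10 ms chunks, the VAD consumes 32 ms windows, and the last probability is reused until a full window has accumulated. A standalone sketch of that buffering, with the window size and the VAD callback as stand-ins rather than the `pysilero_vad` API:

```python
# 10 ms chunks accumulate until a 32 ms window is full; the previous
# probability is returned in between, mirroring the diff above.
BYTES_PER_CHUNK = 320    # 10 ms of 16 kHz 16-bit mono audio
VAD_CHUNK_BYTES = 1024   # 32 ms window, standing in for vad.chunk_bytes()


class BufferedVad:
    def __init__(self, process_window) -> None:
        self._process_window = process_window
        self._buffer = bytearray()
        self.last_probability: float | None = None

    def push(self, chunk: bytes) -> float | None:
        assert len(chunk) == BYTES_PER_CHUNK
        self._buffer.extend(chunk)
        if len(self._buffer) >= VAD_CHUNK_BYTES:
            self.last_probability = self._process_window(
                bytes(self._buffer[:VAD_CHUNK_BYTES])
            )
            # Keep the unprocessed tail for the next window
            del self._buffer[:VAD_CHUNK_BYTES]
        return self.last_probability


vad = BufferedVad(lambda window: 0.9)  # dummy VAD that always says "speech"
for n in range(4):
    print(n, vad.push(bytes(BYTES_PER_CHUNK)))  # None, None, None, then 0.9
```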
@@ -8,5 +8,5 @@
  "integration_type": "system",
  "iot_class": "local_push",
  "quality_scale": "internal",
  "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.2"]
  "requirements": ["pysilero-vad==3.2.0", "pyspeex-noise==1.0.2"]
}
@@ -55,7 +55,7 @@ from homeassistant.util import (
from homeassistant.util.hass_dict import HassKey
from homeassistant.util.limited_size_dict import LimitedSizeDict

from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer
from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, SileroVadSpeexEnhancer
from .const import (
    ACKNOWLEDGE_PATH,
    BYTES_PER_CHUNK,
@@ -633,7 +633,7 @@ class PipelineRun:
        # Initialize with audio settings
        if self.audio_settings.needs_processor and (self.audio_enhancer is None):
            # Default audio enhancer
            self.audio_enhancer = MicroVadSpeexEnhancer(
            self.audio_enhancer = SileroVadSpeexEnhancer(
                self.audio_settings.auto_gain_dbfs,
                self.audio_settings.noise_suppression_level,
                self.audio_settings.is_vad_enabled,
@@ -1,6 +1,5 @@
"""The BSB-Lan integration."""

import asyncio
import dataclasses

from bsblan import (
@@ -78,16 +77,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
    bsblan = BSBLAN(config, session)

    try:
        # Initialize the client first - this sets up internal caches and validates
        # the connection by fetching firmware version
        # Initialize the client first - this sets up internal caches and validates the connection
        await bsblan.initialize()

        # Fetch device metadata in parallel for faster startup
        device, info, static = await asyncio.gather(
            bsblan.device(),
            bsblan.info(),
            bsblan.static_values(),
        )
        # Fetch all required device metadata
        device = await bsblan.device()
        info = await bsblan.info()
        static = await bsblan.static_values()
    except BSBLANConnectionError as err:
        raise ConfigEntryNotReady(
            translation_domain=DOMAIN,
@@ -115,10 +110,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: BSBLanConfigEntry) -> bo
    fast_coordinator = BSBLanFastCoordinator(hass, entry, bsblan)
    slow_coordinator = BSBLanSlowCoordinator(hass, entry, bsblan)

    # Perform first refresh of fast coordinator (required for entities)
    # Perform first refresh of both coordinators
    await fast_coordinator.async_config_entry_first_refresh()

    # Refresh slow coordinator - don't fail if DHW is not available
    # Try to refresh slow coordinator, but don't fail if DHW is not available
    # This allows the integration to work even if the device doesn't support DHW
    await slow_coordinator.async_refresh()
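The two sides of the hunk above differ in how the device metadata is fetched: one await per call versus a single `asyncio.gather`. A self-contained timing sketch with dummy coroutines shows why the gather form is faster for independent I/O:

```python
# Sequential awaits sum the latencies; gather overlaps them.
import asyncio
import time


async def fetch(name: str, delay: float) -> str:
    await asyncio.sleep(delay)  # stand-in for one HTTP call
    return name


async def main() -> None:
    start = time.perf_counter()
    device = await fetch("device", 0.2)
    info = await fetch("info", 0.2)
    static = await fetch("static", 0.2)
    print(f"sequential: {time.perf_counter() - start:.1f}s")  # ~0.6s

    start = time.perf_counter()
    device, info, static = await asyncio.gather(
        fetch("device", 0.2), fetch("info", 0.2), fetch("static", 0.2)
    )
    print(f"gather: {time.perf_counter() - start:.1f}s")  # ~0.2s


asyncio.run(main())
```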
@@ -111,17 +111,11 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
            return None
        return self.coordinator.data.state.target_temperature.value

    @property
    def _hvac_mode_value(self) -> int | str | None:
        """Return the raw hvac_mode value from the coordinator."""
        if (hvac_mode := self.coordinator.data.state.hvac_mode) is None:
            return None
        return hvac_mode.value

    @property
    def hvac_mode(self) -> HVACMode | None:
        """Return hvac operation ie. heat, cool mode."""
        if (hvac_mode_value := self._hvac_mode_value) is None:
        hvac_mode_value = self.coordinator.data.state.hvac_mode.value
        if hvac_mode_value is None:
            return None
        # BSB-Lan returns integer values: 0=off, 1=auto, 2=eco, 3=heat
        if isinstance(hvac_mode_value, int):
@@ -131,8 +125,9 @@ class BSBLANClimate(BSBLanEntity, ClimateEntity):
    @property
    def preset_mode(self) -> str | None:
        """Return the current preset mode."""
        hvac_mode_value = self.coordinator.data.state.hvac_mode.value
        # BSB-Lan mode 2 is eco/reduced mode
        if self._hvac_mode_value == 2:
        if hvac_mode_value == 2:
            return PRESET_ECO
        return PRESET_NONE
@@ -2,6 +2,7 @@

from dataclasses import dataclass
from datetime import timedelta
from random import randint

from bsblan import (
    BSBLAN,
@@ -22,17 +23,6 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda

from .const import DOMAIN, LOGGER, SCAN_INTERVAL_FAST, SCAN_INTERVAL_SLOW

# Filter lists for optimized API calls - only fetch parameters we actually use
# This significantly reduces response time (~0.2s per parameter saved)
STATE_INCLUDE = ["current_temperature", "target_temperature", "hvac_mode"]
SENSOR_INCLUDE = ["current_temperature", "outside_temperature"]
DHW_STATE_INCLUDE = [
    "operating_mode",
    "nominal_setpoint",
    "dhw_actual_value_top_temperature",
]
DHW_CONFIG_INCLUDE = ["reduced_setpoint", "nominal_setpoint_max"]


@dataclass
class BSBLanFastData:
@@ -90,18 +80,26 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
            config_entry,
            client,
            name=f"{DOMAIN}_fast_{config_entry.data[CONF_HOST]}",
            update_interval=SCAN_INTERVAL_FAST,
            update_interval=self._get_update_interval(),
        )

    def _get_update_interval(self) -> timedelta:
        """Get the update interval with a random offset.

        Add a random number of seconds to avoid timeouts when
        the BSB-Lan device is already/still busy retrieving data,
        e.g. for MQTT or internal logging.
        """
        return SCAN_INTERVAL_FAST + timedelta(seconds=randint(1, 8))

    async def _async_update_data(self) -> BSBLanFastData:
        """Fetch fast-changing data from the BSB-Lan device."""
        try:
            # Client is already initialized in async_setup_entry
            # Use include filtering to only fetch parameters we actually use
            # This reduces response time significantly (~0.2s per parameter)
            state = await self.client.state(include=STATE_INCLUDE)
            sensor = await self.client.sensor(include=SENSOR_INCLUDE)
            dhw = await self.client.hot_water_state(include=DHW_STATE_INCLUDE)
            # Fetch fast-changing data (state, sensor, DHW state)
            state = await self.client.state()
            sensor = await self.client.sensor()
            dhw = await self.client.hot_water_state()

        except BSBLANAuthError as err:
            raise ConfigEntryAuthFailed(
@@ -113,6 +111,9 @@ class BSBLanFastCoordinator(BSBLanCoordinator[BSBLanFastData]):
                f"Error while establishing connection with BSB-Lan device at {host}"
            ) from err

        # Update the interval with random jitter for next update
        self.update_interval = self._get_update_interval()

        return BSBLanFastData(
            state=state,
            sensor=sensor,
@@ -142,8 +143,8 @@ class BSBLanSlowCoordinator(BSBLanCoordinator[BSBLanSlowData]):
        """Fetch slow-changing data from the BSB-Lan device."""
        try:
            # Client is already initialized in async_setup_entry
            # Use include filtering to only fetch parameters we actually use
            dhw_config = await self.client.hot_water_config(include=DHW_CONFIG_INCLUDE)
            # Fetch slow-changing configuration data
            dhw_config = await self.client.hot_water_config()
            dhw_schedule = await self.client.hot_water_schedule()

        except AttributeError:
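The jittered interval introduced above is a small but useful trick: each refresh pushes the next one out by the base interval plus one to eight random seconds, so the integration does not poll in lockstep with the device's own MQTT or logging work. A minimal sketch, with `SCAN_INTERVAL_FAST` assumed to be a 30-second timedelta:

```python
# Each call returns the base interval plus a small random offset.
from datetime import timedelta
from random import randint

SCAN_INTERVAL_FAST = timedelta(seconds=30)  # assumed base interval


def next_interval() -> timedelta:
    return SCAN_INTERVAL_FAST + timedelta(seconds=randint(1, 8))


print(next_interval())  # e.g. 0:00:35
```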
@@ -29,11 +29,7 @@ class BSBLanEntityBase[_T: BSBLanCoordinator](CoordinatorEntity[_T]):
            connections={(CONNECTION_NETWORK_MAC, format_mac(mac))},
            name=data.device.name,
            manufacturer="BSBLAN Inc.",
            model=(
                data.info.device_identification.value
                if data.info.device_identification
                else None
            ),
            model=data.info.device_identification.value,
            sw_version=data.device.version,
            configuration_url=f"http://{host}",
        )
@@ -7,7 +7,7 @@
  "integration_type": "device",
  "iot_class": "local_polling",
  "loggers": ["bsblan"],
  "requirements": ["python-bsblan==4.1.0"],
  "requirements": ["python-bsblan==3.1.6"],
  "zeroconf": [
    {
      "name": "bsb-lan*",
@@ -50,6 +50,7 @@ from . import (
from .client import CloudClient
from .const import (
    CONF_ACCOUNT_LINK_SERVER,
    CONF_ACCOUNTS_SERVER,
    CONF_ACME_SERVER,
    CONF_ALEXA,
    CONF_ALIASES,
@@ -137,6 +138,7 @@ _BASE_CONFIG_SCHEMA = vol.Schema(
        vol.Optional(CONF_ALEXA): ALEXA_SCHEMA,
        vol.Optional(CONF_GOOGLE_ACTIONS): GACTIONS_SCHEMA,
        vol.Optional(CONF_ACCOUNT_LINK_SERVER): str,
        vol.Optional(CONF_ACCOUNTS_SERVER): str,
        vol.Optional(CONF_ACME_SERVER): str,
        vol.Optional(CONF_API_SERVER): str,
        vol.Optional(CONF_RELAYER_SERVER): str,

@@ -76,6 +76,7 @@ CONF_GOOGLE_ACTIONS = "google_actions"
CONF_USER_POOL_ID = "user_pool_id"

CONF_ACCOUNT_LINK_SERVER = "account_link_server"
CONF_ACCOUNTS_SERVER = "accounts_server"
CONF_ACME_SERVER = "acme_server"
CONF_API_SERVER = "api_server"
CONF_DISCOVERY_SERVICE_ACTIONS = "discovery_service_actions"

@@ -13,6 +13,6 @@
  "integration_type": "system",
  "iot_class": "cloud_push",
  "loggers": ["acme", "hass_nabucasa", "snitun"],
  "requirements": ["hass-nabucasa==1.9.0"],
  "requirements": ["hass-nabucasa==1.7.0"],
  "single_config_entry": true
}
@@ -7,5 +7,6 @@
  "integration_type": "service",
  "iot_class": "local_push",
  "loggers": ["datadog"],
  "quality_scale": "legacy",
  "requirements": ["datadog==0.52.0"]
}
@@ -2,7 +2,6 @@

from __future__ import annotations

import asyncio
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
@@ -99,29 +98,16 @@ class FireflyDataUpdateCoordinator(DataUpdateCoordinator[FireflyCoordinatorData]

        try:
            accounts = await self.firefly.get_accounts()

            (
                categories,
                primary_currency,
                budgets,
                bills,
            ) = await asyncio.gather(
                self.firefly.get_categories(),
                self.firefly.get_currency_primary(),
                self.firefly.get_budgets(start=start_date, end=end_date),
                self.firefly.get_bills(),
            )

            category_details = await asyncio.gather(
                *(
                    self.firefly.get_category(
                        category_id=int(category.id),
                        start=start_date,
                        end=end_date,
                    )
                    for category in categories
            categories = await self.firefly.get_categories()
            category_details = [
                await self.firefly.get_category(
                    category_id=int(category.id), start=start_date, end=end_date
                )
            )
                for category in categories
            ]
            primary_currency = await self.firefly.get_currency_primary()
            budgets = await self.firefly.get_budgets(start=start_date, end=end_date)
            bills = await self.firefly.get_bills()
        except FireflyAuthenticationError as err:
            raise ConfigEntryAuthFailed(
                translation_domain=DOMAIN,
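One side of the hunk above fans one request out per category with `asyncio.gather(*(...))`, while the other awaits them one at a time inside a list comprehension (which serializes the calls). A dummy-coroutine sketch of the fan-out form:

```python
# gather(*generator) launches all per-category requests concurrently.
import asyncio


async def get_category(category_id: int) -> dict:
    await asyncio.sleep(0.1)  # stand-in for one HTTP call
    return {"id": category_id}


async def main() -> None:
    categories = [1, 2, 3]
    details = await asyncio.gather(
        *(get_category(category_id=c) for c in categories)
    )
    print(details)  # all three requests ran concurrently, ~0.1s total


asyncio.run(main())
```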
@@ -7,5 +7,5 @@
  "integration_type": "service",
  "iot_class": "local_polling",
  "quality_scale": "bronze",
  "requirements": ["pyfirefly==0.1.11"]
  "requirements": ["pyfirefly==0.1.10"]
}
@@ -461,7 +461,7 @@ FITBIT_RESOURCES_LIST: Final[tuple[FitbitSensorEntityDescription, ...]] = (
        key="sleep/timeInBed",
        translation_key="sleep_time_in_bed",
        native_unit_of_measurement=UnitOfTime.MINUTES,
        icon="mdi:bed",
        icon="mdi:hotel",
        device_class=SensorDeviceClass.DURATION,
        scope=FitbitScope.SLEEP,
        state_class=SensorStateClass.TOTAL_INCREASING,
@@ -31,7 +31,7 @@ STEP_USER_DATA_SCHEMA = vol.Schema(
)
STEP_SMS_CODE_DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_SMS_CODE): str,
        vol.Required(CONF_SMS_CODE): int,
    }
)

@@ -75,7 +75,7 @@ class FressnapfTrackerConfigFlow(ConfigFlow, domain=DOMAIN):
        return errors, False

    async def _async_verify_sms_code(
        self, sms_code: str
        self, sms_code: int
    ) -> tuple[dict[str, str], str | None]:
        """Verify SMS code and return errors and access_token."""
        errors: dict[str, str] = {}
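The hunk above swaps the SMS-code field between `str` and `int`. One practical difference worth noting: integer coercion silently drops leading zeros, which matters for one-time codes.

```python
# Why codes are often kept as text rather than integers:
print(int("042"))  # 42   - the leading zero is lost
print("042")       # '042' preserved as entered
```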
@@ -7,5 +7,5 @@
  "integration_type": "hub",
  "iot_class": "cloud_polling",
  "quality_scale": "bronze",
  "requirements": ["fressnapftracker==0.2.1"]
  "requirements": ["fressnapftracker==0.2.0"]
}
@@ -164,12 +164,13 @@ def _async_wol_buttons_list(
class FritzBoxWOLButton(FritzDeviceBase, ButtonEntity):
    """Defines a FRITZ!Box Tools Wake On LAN button."""

    _attr_icon = "mdi:lan-pending"
    _attr_entity_registry_enabled_default = False
    _attr_translation_key = "wake_on_lan"

    def __init__(self, avm_wrapper: AvmWrapper, device: FritzDevice) -> None:
        """Initialize Fritz!Box WOL button."""
        super().__init__(avm_wrapper, device)
        self._name = f"{self.hostname} Wake on LAN"
        self._attr_unique_id = f"{self._mac}_wake_on_lan"
        self._is_available = True
@@ -10,7 +10,6 @@ from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .const import DEFAULT_DEVICE_NAME
from .coordinator import FRITZ_DATA_KEY, AvmWrapper, FritzConfigEntry, FritzData
from .entity import FritzDeviceBase
from .helpers import device_filter_out_from_trackers
@@ -72,7 +71,6 @@ class FritzBoxTracker(FritzDeviceBase, ScannerEntity):
    def __init__(self, avm_wrapper: AvmWrapper, device: FritzDevice) -> None:
        """Initialize a FRITZ!Box device."""
        super().__init__(avm_wrapper, device)
        self._attr_name: str = device.hostname or DEFAULT_DEVICE_NAME
        self._last_activity: datetime.datetime | None = device.last_activity

    @property
@@ -13,7 +13,7 @@ from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .const import DOMAIN
from .const import DEFAULT_DEVICE_NAME, DOMAIN
from .coordinator import AvmWrapper
from .models import FritzDevice

@@ -21,17 +21,21 @@ from .models import FritzDevice
class FritzDeviceBase(CoordinatorEntity[AvmWrapper]):
    """Entity base class for a device connected to a FRITZ!Box device."""

    _attr_has_entity_name = True

    def __init__(self, avm_wrapper: AvmWrapper, device: FritzDevice) -> None:
        """Initialize a FRITZ!Box device."""
        super().__init__(avm_wrapper)
        self._avm_wrapper = avm_wrapper
        self._mac: str = device.mac_address
        self._name: str = device.hostname or DEFAULT_DEVICE_NAME
        self._attr_device_info = DeviceInfo(
            connections={(dr.CONNECTION_NETWORK_MAC, device.mac_address)}
        )

    @property
    def name(self) -> str:
        """Return device name."""
        return self._name

    @property
    def ip_address(self) -> str | None:
        """Return the primary ip address of the device."""
@@ -3,9 +3,6 @@
    "button": {
      "cleanup": {
        "default": "mdi:broom"
      },
      "wake_on_lan": {
        "default": "mdi:lan-pending"
      }
    },
    "sensor": {
@@ -51,11 +48,6 @@
      "max_kb_s_sent": {
        "default": "mdi:upload"
      }
    },
    "switch": {
      "internet_access": {
        "default": "mdi:router-wireless-settings"
      }
    }
  },
  "services": {
@@ -8,7 +8,6 @@
  "integration_type": "hub",
  "iot_class": "local_polling",
  "loggers": ["fritzconnection"],
  "quality_scale": "bronze",
  "requirements": ["fritzconnection[qr]==1.15.0", "xmltodict==1.0.2"],
  "ssdp": [
    {
@@ -13,7 +13,9 @@ rules:
  docs-removal-instructions: done
  entity-event-setup: done
  entity-unique-id: done
  has-entity-name: done
  has-entity-name:
    status: todo
    comment: partially done
  runtime-data: done
  test-before-configure: done
  test-before-setup: done
@@ -108,9 +108,6 @@
      },
      "reconnect": {
        "name": "Reconnect"
      },
      "wake_on_lan": {
        "name": "Wake on LAN"
      }
    },
    "sensor": {
@@ -165,11 +162,6 @@
      "max_kb_s_sent": {
        "name": "Max connection upload throughput"
      }
    },
    "switch": {
      "internet_access": {
        "name": "Internet access"
      }
    }
  },
  "exceptions": {
@@ -499,12 +499,13 @@ class FritzBoxDeflectionSwitch(FritzBoxBaseCoordinatorSwitch):
class FritzBoxProfileSwitch(FritzDeviceBase, SwitchEntity):
    """Defines a FRITZ!Box Tools DeviceProfile switch."""

    _attr_translation_key = "internet_access"
    _attr_icon = "mdi:router-wireless-settings"

    def __init__(self, avm_wrapper: AvmWrapper, device: FritzDevice) -> None:
        """Init Fritz profile."""
        super().__init__(avm_wrapper, device)
        self._attr_is_on: bool = False
        self._name = f"{device.hostname} Internet Access"
        self._attr_unique_id = f"{self._mac}_internet_access"
        self._attr_entity_category = EntityCategory.CONFIG
@@ -23,5 +23,5 @@
    "winter_mode": {}
  },
  "quality_scale": "internal",
  "requirements": ["home-assistant-frontend==20260107.1"]
  "requirements": ["home-assistant-frontend==20260107.0"]
}
@@ -66,7 +66,6 @@ from .const import (
    CONF_COLD_TOLERANCE,
    CONF_HEATER,
    CONF_HOT_TOLERANCE,
    CONF_KEEP_ALIVE,
    CONF_MAX_TEMP,
    CONF_MIN_DUR,
    CONF_MIN_TEMP,
@@ -82,6 +81,7 @@ _LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Generic Thermostat"

CONF_INITIAL_HVAC_MODE = "initial_hvac_mode"
CONF_KEEP_ALIVE = "keep_alive"
CONF_PRECISION = "precision"
CONF_TARGET_TEMP = "target_temp"
CONF_TEMP_STEP = "target_temp_step"
@@ -21,7 +21,6 @@ from .const import (
    CONF_COLD_TOLERANCE,
    CONF_HEATER,
    CONF_HOT_TOLERANCE,
    CONF_KEEP_ALIVE,
    CONF_MAX_TEMP,
    CONF_MIN_DUR,
    CONF_MIN_TEMP,
@@ -60,9 +59,6 @@ OPTIONS_SCHEMA = {
    vol.Optional(CONF_MIN_DUR): selector.DurationSelector(
        selector.DurationSelectorConfig(allow_negative=False)
    ),
    vol.Optional(CONF_KEEP_ALIVE): selector.DurationSelector(
        selector.DurationSelectorConfig(allow_negative=False)
    ),
    vol.Optional(CONF_MIN_TEMP): selector.NumberSelector(
        selector.NumberSelectorConfig(
            mode=selector.NumberSelectorMode.BOX, unit_of_measurement=DEGREE, step=0.1
@@ -33,5 +33,4 @@ CONF_PRESETS = {
    )
}
CONF_SENSOR = "target_sensor"
CONF_KEEP_ALIVE = "keep_alive"
DEFAULT_TOLERANCE = 0.3
@@ -18,7 +18,6 @@
          "cold_tolerance": "Cold tolerance",
          "heater": "Actuator switch",
          "hot_tolerance": "Hot tolerance",
          "keep_alive": "Keep-alive interval",
          "max_temp": "Maximum target temperature",
          "min_cycle_duration": "Minimum cycle duration",
          "min_temp": "Minimum target temperature",
@@ -30,7 +29,6 @@
          "cold_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched on. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will start when the sensor goes below 24.5.",
          "heater": "Switch entity used to cool or heat depending on A/C mode.",
          "hot_tolerance": "Minimum amount of difference between the temperature read by the temperature sensor the target temperature that must change prior to being switched off. For example, if the target temperature is 25 and the tolerance is 0.5 the heater will stop when the sensor equals or goes above 25.5.",
          "keep_alive": "Trigger the heater periodically to keep devices from losing state. When set, min cycle duration is ignored.",
          "min_cycle_duration": "Set a minimum amount of time that the switch specified must be in its current state prior to being switched either off or on.",
          "target_sensor": "Temperature sensor that reflects the current temperature."
        },
@@ -47,7 +45,6 @@
          "cold_tolerance": "[%key:component::generic_thermostat::config::step::user::data::cold_tolerance%]",
          "heater": "[%key:component::generic_thermostat::config::step::user::data::heater%]",
          "hot_tolerance": "[%key:component::generic_thermostat::config::step::user::data::hot_tolerance%]",
          "keep_alive": "[%key:component::generic_thermostat::config::step::user::data::keep_alive%]",
          "max_temp": "[%key:component::generic_thermostat::config::step::user::data::max_temp%]",
          "min_cycle_duration": "[%key:component::generic_thermostat::config::step::user::data::min_cycle_duration%]",
          "min_temp": "[%key:component::generic_thermostat::config::step::user::data::min_temp%]",
@@ -58,7 +55,6 @@
          "cold_tolerance": "[%key:component::generic_thermostat::config::step::user::data_description::cold_tolerance%]",
          "heater": "[%key:component::generic_thermostat::config::step::user::data_description::heater%]",
          "hot_tolerance": "[%key:component::generic_thermostat::config::step::user::data_description::hot_tolerance%]",
          "keep_alive": "[%key:component::generic_thermostat::config::step::user::data_description::keep_alive%]",
          "min_cycle_duration": "[%key:component::generic_thermostat::config::step::user::data_description::min_cycle_duration%]",
          "target_sensor": "[%key:component::generic_thermostat::config::step::user::data_description::target_sensor%]"
        }
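The tolerance descriptions quoted above encode plain hysteresis. A sketch of that rule for heating mode, using the example values from the text (target 25, tolerances 0.5): turn on below 24.5, turn off at or above 25.5, and leave the switch alone inside the dead band.

```python
# Hysteresis rule matching the examples in the tolerance descriptions.
def heater_action(current: float, target: float,
                  cold_tol: float, hot_tol: float) -> str | None:
    if current < target - cold_tol:
        return "turn_on"
    if current >= target + hot_tol:
        return "turn_off"
    return None  # inside the dead band: no change


print(heater_action(24.4, 25, 0.5, 0.5))  # turn_on
print(heater_action(25.5, 25, 0.5, 0.5))  # turn_off
print(heater_action(25.0, 25, 0.5, 0.5))  # None
```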
@@ -1,21 +1,9 @@
{
  "entity": {
    "sensor": {
      "ammonia": {
        "default": "mdi:molecule"
      },
      "benzene": {
        "default": "mdi:molecule"
      },
      "nitrogen_dioxide": {
        "default": "mdi:molecule"
      },
      "nitrogen_monoxide": {
        "default": "mdi:molecule"
      },
      "non_methane_hydrocarbons": {
        "default": "mdi:molecule"
      },
      "ozone": {
        "default": "mdi:molecule"
      },
@@ -99,52 +99,18 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
            "local_aqi": data.indexes[1].display_name
        },
    ),
    AirQualitySensorEntityDescription(
        key="c6h6",
        translation_key="benzene",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement_fn=lambda x: x.pollutants.c6h6.concentration.units,
        value_fn=lambda x: x.pollutants.c6h6.concentration.value,
        exists_fn=lambda x: "c6h6" in {p.code for p in x.pollutants},
    ),
    AirQualitySensorEntityDescription(
        key="co",
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.CO,
        native_unit_of_measurement_fn=lambda x: x.pollutants.co.concentration.units,
        exists_fn=lambda x: "co" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.co.concentration.value,
    ),
    AirQualitySensorEntityDescription(
        key="nh3",
        translation_key="ammonia",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement_fn=lambda x: x.pollutants.nh3.concentration.units,
        value_fn=lambda x: x.pollutants.nh3.concentration.value,
        exists_fn=lambda x: "nh3" in {p.code for p in x.pollutants},
    ),
    AirQualitySensorEntityDescription(
        key="nmhc",
        translation_key="non_methane_hydrocarbons",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement_fn=lambda x: x.pollutants.nmhc.concentration.units,
        value_fn=lambda x: x.pollutants.nmhc.concentration.value,
        exists_fn=lambda x: "nmhc" in {p.code for p in x.pollutants},
    ),
    AirQualitySensorEntityDescription(
        key="no",
        translation_key="nitrogen_monoxide",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement_fn=lambda x: x.pollutants.no.concentration.units,
        value_fn=lambda x: x.pollutants.no.concentration.value,
        exists_fn=lambda x: "no" in {p.code for p in x.pollutants},
    ),
    AirQualitySensorEntityDescription(
        key="no2",
        translation_key="nitrogen_dioxide",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement_fn=lambda x: x.pollutants.no2.concentration.units,
        exists_fn=lambda x: "no2" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.no2.concentration.value,
    ),
    AirQualitySensorEntityDescription(
@@ -152,7 +118,6 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
        translation_key="ozone",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement_fn=lambda x: x.pollutants.o3.concentration.units,
        exists_fn=lambda x: "o3" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.o3.concentration.value,
    ),
    AirQualitySensorEntityDescription(
@@ -160,7 +125,6 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.PM10,
        native_unit_of_measurement_fn=lambda x: x.pollutants.pm10.concentration.units,
        exists_fn=lambda x: "pm10" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.pm10.concentration.value,
    ),
    AirQualitySensorEntityDescription(
@@ -168,7 +132,6 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
        state_class=SensorStateClass.MEASUREMENT,
        device_class=SensorDeviceClass.PM25,
        native_unit_of_measurement_fn=lambda x: x.pollutants.pm25.concentration.units,
        exists_fn=lambda x: "pm25" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.pm25.concentration.value,
    ),
    AirQualitySensorEntityDescription(
@@ -176,7 +139,6 @@ AIR_QUALITY_SENSOR_TYPES: tuple[AirQualitySensorEntityDescription, ...] = (
        translation_key="sulphur_dioxide",
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement_fn=lambda x: x.pollutants.so2.concentration.units,
        exists_fn=lambda x: "so2" in {p.code for p in x.pollutants},
        value_fn=lambda x: x.pollutants.so2.concentration.value,
    ),
)
@@ -76,12 +76,6 @@
  },
  "entity": {
    "sensor": {
      "ammonia": {
        "name": "Ammonia"
      },
      "benzene": {
        "name": "Benzene"
      },
      "local_aqi": {
        "name": "{local_aqi} AQI"
      },
@@ -195,9 +189,6 @@
        "name": "{local_aqi} dominant pollutant",
        "state": {
          "co": "[%key:component::sensor::entity_component::carbon_monoxide::name%]",
          "nh3": "[%key:component::google_air_quality::entity::sensor::ammonia::name%]",
          "nmhc": "[%key:component::google_air_quality::entity::sensor::non_methane_hydrocarbons::name%]",
          "no": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]",
          "no2": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]",
          "o3": "[%key:component::sensor::entity_component::ozone::name%]",
          "pm10": "[%key:component::sensor::entity_component::pm10::name%]",
@@ -208,12 +199,6 @@
      "nitrogen_dioxide": {
        "name": "[%key:component::sensor::entity_component::nitrogen_dioxide::name%]"
      },
      "nitrogen_monoxide": {
        "name": "[%key:component::sensor::entity_component::nitrogen_monoxide::name%]"
      },
      "non_methane_hydrocarbons": {
        "name": "Non-methane hydrocarbons"
      },
      "ozone": {
        "name": "[%key:component::sensor::entity_component::ozone::name%]"
      },
@@ -374,7 +374,7 @@ class SensorGroup(GroupEntity, SensorEntity):
def async_update_group_state(self) -> None:
"""Query all members and determine the sensor group state."""
self.calculate_state_attributes(self._get_valid_entities())
states: list[str | None] = []
states: list[str] = []
valid_units = self._valid_units
valid_states: list[bool] = []
sensor_values: list[tuple[str, float, State]] = []
@@ -435,12 +435,9 @@ class SensorGroup(GroupEntity, SensorEntity):
state.attributes.get("unit_of_measurement"),
self.entity_id,
)
else:
states.append(None)
valid_states.append(False)

# Set group as unavailable if all members are unavailable or missing
self._attr_available = not all(s in (STATE_UNAVAILABLE, None) for s in states)
# Set group as unavailable if all members do not have numeric values
self._attr_available = any(numeric_state for numeric_state in valid_states)

valid_state = self.mode(
state not in (STATE_UNKNOWN, STATE_UNAVAILABLE) for state in states
@@ -449,7 +446,6 @@ class SensorGroup(GroupEntity, SensorEntity):

if not valid_state or not valid_state_numeric:
self._attr_native_value = None
self._extra_state_attribute = {}
return

# Calculate values

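The hunk above contrasts two availability rules: staying available unless every member is unavailable or missing, versus being available only while at least one member carries a numeric state. A standalone illustration of how the two rules can disagree (plain strings stand in for Home Assistant state objects):

    def is_numeric(state: str | None) -> bool:
        """True if a member state parses as a float."""
        if state in (None, "unavailable", "unknown"):
            return False
        try:
            float(state)
        except ValueError:
            return False
        return True

    states = ["unavailable", "spam"]  # one dead member, one non-numeric member
    # rule 1: available unless every member is unavailable/missing
    assert (not all(s in ("unavailable", None) for s in states)) is True
    # rule 2: available only if some member has a numeric value
    assert any(is_numeric(s) for s in states) is False
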
@@ -8,7 +8,6 @@ from .coordinator import HDFuryConfigEntry, HDFuryCoordinator
PLATFORMS = [
Platform.BUTTON,
Platform.SELECT,
Platform.SENSOR,
Platform.SWITCH,
]


@@ -1,21 +0,0 @@
"""Diagnostics for HDFury Integration."""

from typing import Any

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant

from .coordinator import HDFuryCoordinator


async def async_get_config_entry_diagnostics(
hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
"""Return diagnostics for a config entry."""
coordinator: HDFuryCoordinator = entry.runtime_data

return {
"board": coordinator.data.board,
"info": coordinator.data.info,
"config": coordinator.data.config,
}
@@ -16,50 +16,6 @@
"default": "mdi:hdmi-port"
}
},
"sensor": {
"aud0": {
"default": "mdi:audio-input-rca"
},
"aud1": {
"default": "mdi:audio-input-rca"
},
"audout": {
"default": "mdi:television-speaker"
},
"earcrx": {
"default": "mdi:audio-video"
},
"edida0": {
"default": "mdi:format-list-text"
},
"edida1": {
"default": "mdi:format-list-text"
},
"edida2": {
"default": "mdi:format-list-text"
},
"rx0": {
"default": "mdi:video-input-hdmi"
},
"rx1": {
"default": "mdi:video-input-hdmi"
},
"sink0": {
"default": "mdi:television"
},
"sink1": {
"default": "mdi:television"
},
"sink2": {
"default": "mdi:audio-video"
},
"tx0": {
"default": "mdi:cable-data"
},
"tx1": {
"default": "mdi:cable-data"
}
},
"switch": {
"autosw": {
"default": "mdi:import"

@@ -43,7 +43,7 @@ rules:

# Gold
devices: done
diagnostics: done
diagnostics: todo
discovery-update-info: todo
discovery: todo
docs-data-update: todo

@@ -1,121 +0,0 @@
"""Sensor platform for HDFury Integration."""

from homeassistant.components.sensor import SensorEntity, SensorEntityDescription
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import HDFuryConfigEntry
from .entity import HDFuryEntity

SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="RX0",
translation_key="rx0",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="RX1",
translation_key="rx1",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="TX0",
translation_key="tx0",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="TX1",
translation_key="tx1",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="AUD0",
translation_key="aud0",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="AUD1",
translation_key="aud1",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="AUDOUT",
translation_key="audout",
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="EARCRX",
translation_key="earcrx",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="SINK0",
translation_key="sink0",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="SINK1",
translation_key="sink1",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="SINK2",
translation_key="sink2",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="EDIDA0",
translation_key="edida0",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="EDIDA1",
translation_key="edida1",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
SensorEntityDescription(
key="EDIDA2",
translation_key="edida2",
entity_registry_enabled_default=False,
entity_category=EntityCategory.DIAGNOSTIC,
),
)


async def async_setup_entry(
hass: HomeAssistant,
entry: HDFuryConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up sensors using the platform schema."""

coordinator = entry.runtime_data

async_add_entities(
HDFurySensor(coordinator, description)
for description in SENSORS
if description.key in coordinator.data.info
)


class HDFurySensor(HDFuryEntity, SensorEntity):
"""Base HDFury Sensor Class."""

entity_description: SensorEntityDescription

@property
def native_value(self) -> str:
"""Set Sensor Value."""

return self.coordinator.data.info[self.entity_description.key]
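In the removed sensor platform above, entities are only created for keys the coordinator actually received (the `if description.key in coordinator.data.info` guard). A tiny sketch of that guard with plain data; the keys and values here are illustrative only:

    SENSOR_KEYS = ("RX0", "RX1", "TX0", "TX1", "AUDOUT")
    info = {"RX0": "1080p60", "TX0": "4K30", "AUDOUT": "PCM"}  # stand-in for coordinator.data.info

    # Only values this particular board reports become entities.
    created = [key for key in SENSOR_KEYS if key in info]
    assert created == ["RX0", "TX0", "AUDOUT"]
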
@@ -57,50 +57,6 @@
}
}
},
"sensor": {
"aud0": {
"name": "Audio TX0"
},
"aud1": {
"name": "Audio TX1"
},
"audout": {
"name": "Audio output"
},
"earcrx": {
"name": "eARC/ARC status"
},
"edida0": {
"name": "EDID TXA0"
},
"edida1": {
"name": "EDID TXA1"
},
"edida2": {
"name": "EDID AUDA"
},
"rx0": {
"name": "Input RX0"
},
"rx1": {
"name": "Input RX1"
},
"sink0": {
"name": "EDID TX0"
},
"sink1": {
"name": "EDID TX1"
},
"sink2": {
"name": "EDID AUD"
},
"tx0": {
"name": "Output TX0"
},
"tx1": {
"name": "Output TX1"
}
},
"switch": {
"autosw": {
"name": "Auto switch inputs"

@@ -5,7 +5,6 @@ from __future__ import annotations
from dataclasses import dataclass
import logging

from pyhik.constants import SENSOR_MAP
from pyhik.hikvision import HikCamera
import requests

@@ -20,13 +19,10 @@ from homeassistant.const import (
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr

from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

PLATFORMS = [Platform.BINARY_SENSOR, Platform.CAMERA]
PLATFORMS = [Platform.BINARY_SENSOR]


@dataclass
@@ -74,49 +70,19 @@ async def async_setup_entry(hass: HomeAssistant, entry: HikvisionConfigEntry) ->
device_type=device_type,
)

_LOGGER.debug(
"Device %s (type=%s) initial event_states: %s",
device_name,
device_type,
camera.current_event_states,
)

# For NVRs or devices with no detected events, try to fetch events from ISAPI
# Use broader notification methods for NVRs since they often use 'record' etc.
if device_type == "NVR" or not camera.current_event_states:
nvr_notification_methods = {"center", "HTTP", "record", "email", "beep"}

def fetch_and_inject_nvr_events() -> None:
"""Fetch and inject NVR events in a single executor job."""
nvr_events = camera.get_event_triggers(nvr_notification_methods)
_LOGGER.debug("NVR events fetched with extended methods: %s", nvr_events)
if nvr_events:
# Map raw event type names to friendly names using SENSOR_MAP
mapped_events: dict[str, list[int]] = {}
for event_type, channels in nvr_events.items():
friendly_name = SENSOR_MAP.get(event_type.lower(), event_type)
if friendly_name in mapped_events:
mapped_events[friendly_name].extend(channels)
else:
mapped_events[friendly_name] = list(channels)
_LOGGER.debug("Mapped NVR events: %s", mapped_events)
camera.inject_events(mapped_events)
if nvr_events := camera.get_event_triggers():
camera.inject_events(nvr_events)

await hass.async_add_executor_job(fetch_and_inject_nvr_events)

# Start the event stream
await hass.async_add_executor_job(camera.start_stream)

# Register the main device before platforms that use via_device
device_registry = dr.async_get(hass)
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
identifiers={(DOMAIN, device_id)},
name=device_name,
manufacturer="Hikvision",
model=device_type,
)

await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)

return True

@@ -185,27 +185,20 @@ class HikvisionBinarySensor(BinarySensorEntity):
# Build unique ID
self._attr_unique_id = f"{self._data.device_id}_{sensor_type}_{channel}"

# Device info for device registry
# Build entity name based on device type
if self._data.device_type == "NVR":
# NVR channels get their own device linked to the NVR via via_device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{self._data.device_id}_{channel}")},
via_device=(DOMAIN, self._data.device_id),
name=f"{self._data.device_name} Channel {channel}",
manufacturer="Hikvision",
model="NVR Channel",
)
self._attr_name = sensor_type
self._attr_name = f"{sensor_type} {channel}"
else:
# Single camera device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._data.device_id)},
name=self._data.device_name,
manufacturer="Hikvision",
model=self._data.device_type,
)
self._attr_name = sensor_type

# Device info for device registry
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._data.device_id)},
name=self._data.device_name,
manufacturer="Hikvision",
model=self._data.device_type,
)

# Set device class
self._attr_device_class = DEVICE_CLASS_MAP.get(sensor_type)


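The NVR branch above normalizes raw ISAPI event names through pyhik's SENSOR_MAP and merges channel lists that map onto the same friendly name. A standalone sketch of that merge; the two-entry map here is an illustrative stand-in, the real one lives in pyhik.constants:

    SENSOR_MAP = {"vmd": "Motion", "linedetection": "Line Crossing"}  # illustrative subset

    raw_events = {"VMD": [1, 2], "linedetection": [1], "Motion": [3]}

    mapped: dict[str, list[int]] = {}
    for event_type, channels in raw_events.items():
        # unknown event types fall through with their original name
        friendly = SENSOR_MAP.get(event_type.lower(), event_type)
        mapped.setdefault(friendly, []).extend(channels)

    assert mapped == {"Motion": [1, 2, 3], "Line Crossing": [1]}
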
@@ -1,93 +0,0 @@
"""Support for Hikvision cameras."""

from __future__ import annotations

from homeassistant.components.camera import Camera, CameraEntityFeature
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import HikvisionConfigEntry
from .const import DOMAIN

PARALLEL_UPDATES = 0


async def async_setup_entry(
hass: HomeAssistant,
entry: HikvisionConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Hikvision cameras from a config entry."""
data = entry.runtime_data
camera = data.camera

# Get available channels from the library
channels = await hass.async_add_executor_job(camera.get_channels)

if channels:
entities = [HikvisionCamera(entry, channel) for channel in channels]
else:
# Fallback to single camera if no channels detected
entities = [HikvisionCamera(entry, 1)]

async_add_entities(entities)


class HikvisionCamera(Camera):
"""Representation of a Hikvision camera."""

_attr_has_entity_name = True
_attr_name = None
_attr_supported_features = CameraEntityFeature.STREAM

def __init__(
self,
entry: HikvisionConfigEntry,
channel: int,
) -> None:
"""Initialize the camera."""
super().__init__()
self._data = entry.runtime_data
self._channel = channel
self._camera = self._data.camera

# Build unique ID (unique per platform per integration)
self._attr_unique_id = f"{self._data.device_id}_{channel}"

# Device info for device registry
if self._data.device_type == "NVR":
# NVR channels get their own device linked to the NVR via via_device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, f"{self._data.device_id}_{channel}")},
via_device=(DOMAIN, self._data.device_id),
name=f"{self._data.device_name} Channel {channel}",
manufacturer="Hikvision",
model="NVR Channel",
)
else:
# Single camera device
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._data.device_id)},
name=self._data.device_name,
manufacturer="Hikvision",
model=self._data.device_type,
)

async def async_camera_image(
self, width: int | None = None, height: int | None = None
) -> bytes | None:
"""Return a still image from the camera."""
try:
return await self.hass.async_add_executor_job(
self._camera.get_snapshot, self._channel
)
except Exception as err:
raise HomeAssistantError(
f"Error getting image from {self._data.device_name} channel {self._channel}: {err}"
) from err

async def stream_source(self) -> str | None:
"""Return the stream source URL."""
return self._camera.get_stream_url(self._channel)
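The camera platform above wraps the library's synchronous snapshot call in an executor job so the event loop never blocks on network I/O. The same pattern in plain asyncio, with a stand-in for the SDK call:

    import asyncio

    def blocking_snapshot(channel: int) -> bytes:
        """Stand-in for a synchronous SDK call such as get_snapshot."""
        return b"\xff\xd8"  # first bytes of a JPEG

    async def fetch_image(channel: int) -> bytes:
        # hass.async_add_executor_job does the equivalent: run the blocking
        # call in a worker thread and await the result.
        loop = asyncio.get_running_loop()
        return await loop.run_in_executor(None, blocking_snapshot, channel)

    assert asyncio.run(fetch_image(1)).startswith(b"\xff\xd8")
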
@@ -220,33 +220,31 @@ def get_accessory(  # noqa: C901
a_type = "TemperatureSensor"
elif device_class == SensorDeviceClass.HUMIDITY and unit == PERCENTAGE:
a_type = "HumiditySensor"
elif device_class == SensorDeviceClass.PM10:
elif (
device_class == SensorDeviceClass.PM10
or SensorDeviceClass.PM10 in state.entity_id
):
a_type = "PM10Sensor"
elif device_class == SensorDeviceClass.PM25:
elif (
device_class == SensorDeviceClass.PM25
or SensorDeviceClass.PM25 in state.entity_id
):
a_type = "PM25Sensor"
elif device_class == SensorDeviceClass.NITROGEN_DIOXIDE:
a_type = "NitrogenDioxideSensor"
elif device_class == SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS:
a_type = "VolatileOrganicCompoundsSensor"
elif device_class == SensorDeviceClass.GAS:
elif (
device_class == SensorDeviceClass.GAS
or SensorDeviceClass.GAS in state.entity_id
):
a_type = "AirQualitySensor"
elif device_class == SensorDeviceClass.CO:
a_type = "CarbonMonoxideSensor"
elif device_class == SensorDeviceClass.CO2:
elif device_class == SensorDeviceClass.CO2 or "co2" in state.entity_id:
a_type = "CarbonDioxideSensor"
elif device_class == SensorDeviceClass.ILLUMINANCE or unit == LIGHT_LUX:
a_type = "LightSensor"

# Fallbacks based on entity_id
elif SensorDeviceClass.PM10 in state.entity_id:
a_type = "PM10Sensor"
elif SensorDeviceClass.PM25 in state.entity_id:
a_type = "PM25Sensor"
elif SensorDeviceClass.GAS in state.entity_id:
a_type = "AirQualitySensor"
elif "co2" in state.entity_id:
a_type = "CarbonDioxideSensor"

else:
_LOGGER.debug(
"%s: Unsupported sensor type (device_class=%s) (unit=%s)",

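The fallback branches above test an enum member against the entity_id with `in`. That works because SensorDeviceClass is a str-backed enum, so the comparison is a plain substring check. A trimmed stand-in:

    from enum import StrEnum

    class SensorDeviceClass(StrEnum):  # two members only, for illustration
        PM10 = "pm10"
        PM25 = "pm25"

    entity_id = "sensor.outdoor_pm25_concentration"
    # str-backed members behave like strings, so `in` is a substring test
    assert SensorDeviceClass.PM25 in entity_id
    assert SensorDeviceClass.PM10 not in entity_id
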
@@ -13,6 +13,6 @@
"iot_class": "local_polling",
"loggers": ["homewizard_energy"],
"quality_scale": "platinum",
"requirements": ["python-homewizard-energy==10.0.1"],
"requirements": ["python-homewizard-energy==10.0.0"],
"zeroconf": ["_hwenergy._tcp.local.", "_homewizard._tcp.local."]
}

@@ -27,7 +27,7 @@ from .const import (
SUPPORTED_PLATFORMS_UI,
SUPPORTED_PLATFORMS_YAML,
)
from .expose import create_combined_knx_exposure
from .expose import create_knx_exposure
from .knx_module import KNXModule
from .project import STORAGE_KEY as PROJECT_STORAGE_KEY
from .schema import (
@@ -121,10 +121,10 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
hass.data[KNX_MODULE_KEY] = knx_module

if CONF_KNX_EXPOSE in config:
knx_module.yaml_exposures.extend(
create_combined_knx_exposure(hass, knx_module.xknx, config[CONF_KNX_EXPOSE])
)

for expose_config in config[CONF_KNX_EXPOSE]:
knx_module.exposures.append(
create_knx_exposure(hass, knx_module.xknx, expose_config)
)
configured_platforms_yaml = {
platform for platform in SUPPORTED_PLATFORMS_YAML if platform in config
}
@@ -149,9 +149,7 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
# if not loaded directly return
return True

for exposure in knx_module.yaml_exposures:
exposure.async_remove()
for exposure in knx_module.service_exposures.values():
for exposure in knx_module.exposures:
exposure.async_remove()

configured_platforms_yaml = {

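The setup path above feeds the whole CONF_KNX_EXPOSE list into create_combined_knx_exposure, which groups the YAML entries by entity_id so each entity gets one exposure object carrying all of its group addresses. The grouping step in isolation, with illustrative config keys:

    configs = [
        {"entity_id": "sensor.temp", "address": "1/1/1", "attribute": None},
        {"entity_id": "sensor.temp", "address": "1/1/2", "attribute": "battery"},
        {"entity_id": "light.hall", "address": "1/2/1", "attribute": None},
    ]

    by_entity: dict[str, list[dict]] = {}
    for conf in configs:
        by_entity.setdefault(conf["entity_id"], []).append(conf)

    # one exposure per entity, each with every address that entity exposes
    assert len(by_entity["sensor.temp"]) == 2
    assert len(by_entity["light.hall"]) == 1
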
@@ -2,22 +2,14 @@

from __future__ import annotations

from asyncio import TaskGroup
from collections.abc import Callable, Iterable
from dataclasses import dataclass
from collections.abc import Callable
import logging
from typing import Any

from xknx import XKNX
from xknx.devices import DateDevice, DateTimeDevice, ExposeSensor, TimeDevice
from xknx.dpt import DPTBase, DPTNumeric, DPTString
from xknx.dpt.dpt_1 import DPT1BitEnum, DPTSwitch
from xknx.dpt import DPTNumeric, DPTString
from xknx.exceptions import ConversionError
from xknx.telegram.address import (
GroupAddress,
InternalGroupAddress,
parse_device_group_address,
)
from xknx.remote_value import RemoteValueSensor

from homeassistant.const import (
CONF_ENTITY_ID,
@@ -49,159 +41,79 @@ _LOGGER = logging.getLogger(__name__)
@callback
def create_knx_exposure(
hass: HomeAssistant, xknx: XKNX, config: ConfigType
) -> KnxExposeEntity | KnxExposeTime:
"""Create single exposure."""
) -> KNXExposeSensor | KNXExposeTime:
"""Create exposures from config."""

expose_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
exposure: KnxExposeEntity | KnxExposeTime

exposure: KNXExposeSensor | KNXExposeTime
if (
isinstance(expose_type, str)
and expose_type.lower() in ExposeSchema.EXPOSE_TIME_TYPES
):
exposure = KnxExposeTime(
exposure = KNXExposeTime(
xknx=xknx,
config=config,
)
else:
exposure = KnxExposeEntity(
hass=hass,
exposure = KNXExposeSensor(
hass,
xknx=xknx,
entity_id=config[CONF_ENTITY_ID],
options=(_yaml_config_to_expose_options(config),),
config=config,
)
exposure.async_register()
return exposure


@callback
def create_combined_knx_exposure(
hass: HomeAssistant, xknx: XKNX, configs: list[ConfigType]
) -> list[KnxExposeEntity | KnxExposeTime]:
"""Create exposures from YAML config combined by entity_id."""
exposures: list[KnxExposeEntity | KnxExposeTime] = []
entity_exposure_map: dict[str, list[KnxExposeOptions]] = {}

for config in configs:
value_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
if value_type.lower() in ExposeSchema.EXPOSE_TIME_TYPES:
time_exposure = KnxExposeTime(
xknx=xknx,
config=config,
)
time_exposure.async_register()
exposures.append(time_exposure)
continue

entity_id = config[CONF_ENTITY_ID]
option = _yaml_config_to_expose_options(config)
entity_exposure_map.setdefault(entity_id, []).append(option)

for entity_id, options in entity_exposure_map.items():
entity_exposure = KnxExposeEntity(
hass=hass,
xknx=xknx,
entity_id=entity_id,
options=options,
)
entity_exposure.async_register()
exposures.append(entity_exposure)
return exposures


@dataclass(slots=True)
class KnxExposeOptions:
"""Options for KNX Expose."""

attribute: str | None
group_address: GroupAddress | InternalGroupAddress
dpt: type[DPTBase]
respond_to_read: bool
cooldown: float
default: Any | None
value_template: Template | None


def _yaml_config_to_expose_options(config: ConfigType) -> KnxExposeOptions:
"""Convert single yaml expose config to KnxExposeOptions."""
value_type = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
dpt: type[DPTBase]
if value_type == "binary":
# HA yaml expose flag for DPT-1 (no explicit DPT 1 definitions in xknx back then)
dpt = DPTSwitch
else:
dpt = DPTBase.parse_transcoder(config[ExposeSchema.CONF_KNX_EXPOSE_TYPE])  # type: ignore[assignment] # checked by schema validation
ga = parse_device_group_address(config[KNX_ADDRESS])
return KnxExposeOptions(
attribute=config.get(ExposeSchema.CONF_KNX_EXPOSE_ATTRIBUTE),
group_address=ga,
dpt=dpt,
respond_to_read=config[CONF_RESPOND_TO_READ],
cooldown=config[ExposeSchema.CONF_KNX_EXPOSE_COOLDOWN],
default=config.get(ExposeSchema.CONF_KNX_EXPOSE_DEFAULT),
value_template=config.get(CONF_VALUE_TEMPLATE),
)


class KnxExposeEntity:
"""Expose Home Assistant entity values to KNX bus."""
class KNXExposeSensor:
"""Object to Expose Home Assistant entity to KNX bus."""

def __init__(
self,
hass: HomeAssistant,
xknx: XKNX,
entity_id: str,
options: Iterable[KnxExposeOptions],
config: ConfigType,
) -> None:
"""Initialize KnxExposeEntity class."""
"""Initialize of Expose class."""
self.hass = hass
self.xknx = xknx
self.entity_id = entity_id

self.entity_id: str = config[CONF_ENTITY_ID]
self.expose_attribute: str | None = config.get(
ExposeSchema.CONF_KNX_EXPOSE_ATTRIBUTE
)
self.expose_default = config.get(ExposeSchema.CONF_KNX_EXPOSE_DEFAULT)
self.expose_type: int | str = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE]
self.value_template: Template | None = config.get(CONF_VALUE_TEMPLATE)

self._remove_listener: Callable[[], None] | None = None
self._exposures = tuple(
(
option,
ExposeSensor(
xknx=self.xknx,
name=f"{self.entity_id} {option.attribute or 'state'}",
group_address=option.group_address,
respond_to_read=option.respond_to_read,
value_type=option.dpt,
cooldown=option.cooldown,
),
)
for option in options
self.device: ExposeSensor = ExposeSensor(
xknx=self.xknx,
name=f"{self.entity_id}__{self.expose_attribute or 'state'}",
group_address=config[KNX_ADDRESS],
respond_to_read=config[CONF_RESPOND_TO_READ],
value_type=self.expose_type,
cooldown=config[ExposeSchema.CONF_KNX_EXPOSE_COOLDOWN],
)

@property
def name(self) -> str:
"""Return name of the expose entity."""
expose_names = [opt.attribute or "state" for opt, _ in self._exposures]
return f"{self.entity_id}__{'__'.join(expose_names)}"

@callback
def async_register(self) -> None:
"""Register listener and XKNX devices."""
"""Register listener."""
self._remove_listener = async_track_state_change_event(
self.hass, [self.entity_id], self._async_entity_changed
)
for _option, xknx_expose in self._exposures:
self.xknx.devices.async_add(xknx_expose)
self.xknx.devices.async_add(self.device)
self._init_expose_state()

@callback
def _init_expose_state(self) -> None:
"""Initialize state of all exposures."""
"""Initialize state of the exposure."""
init_state = self.hass.states.get(self.entity_id)
for option, xknx_expose in self._exposures:
state_value = self._get_expose_value(init_state, option)
try:
xknx_expose.sensor_value.value = state_value
except ConversionError:
_LOGGER.exception(
"Error setting value %s for expose sensor %s",
state_value,
xknx_expose.name,
)
state_value = self._get_expose_value(init_state)
try:
self.device.sensor_value.value = state_value
except ConversionError:
_LOGGER.exception("Error during sending of expose sensor value")

@callback
def async_remove(self) -> None:
@@ -209,57 +121,53 @@ class KnxExposeEntity:
if self._remove_listener is not None:
self._remove_listener()
self._remove_listener = None
for _option, xknx_expose in self._exposures:
self.xknx.devices.async_remove(xknx_expose)
self.xknx.devices.async_remove(self.device)

def _get_expose_value(
self, state: State | None, option: KnxExposeOptions
) -> bool | int | float | str | None:
"""Extract value from state for a specific option."""
def _get_expose_value(self, state: State | None) -> bool | int | float | str | None:
"""Extract value from state."""
if state is None or state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE):
if option.default is None:
if self.expose_default is None:
return None
value = option.default
elif option.attribute is not None:
_attr = state.attributes.get(option.attribute)
value = _attr if _attr is not None else option.default
value = self.expose_default
elif self.expose_attribute is not None:
_attr = state.attributes.get(self.expose_attribute)
value = _attr if _attr is not None else self.expose_default
else:
value = state.state

if option.value_template is not None:
if self.value_template is not None:
try:
value = option.value_template.async_render_with_possible_json_value(
value = self.value_template.async_render_with_possible_json_value(
value, error_value=None
)
except (TemplateError, TypeError, ValueError) as err:
_LOGGER.warning(
"Error rendering value template for KNX expose %s %s %s: %s",
self.entity_id,
option.attribute or "state",
option.value_template.template,
"Error rendering value template for KNX expose %s %s: %s",
self.device.name,
self.value_template.template,
err,
)
return None

if issubclass(option.dpt, DPT1BitEnum):
if self.expose_type == "binary":
if value in (1, STATE_ON, "True"):
return True
if value in (0, STATE_OFF, "False"):
return False

# Handle numeric and string DPT conversions
if value is not None:
if value is not None and (
isinstance(self.device.sensor_value, RemoteValueSensor)
):
try:
if issubclass(option.dpt, DPTNumeric):
if issubclass(self.device.sensor_value.dpt_class, DPTNumeric):
return float(value)
if issubclass(option.dpt, DPTString):
if issubclass(self.device.sensor_value.dpt_class, DPTString):
# DPT 16.000 only allows up to 14 Bytes
return str(value)[:14]
except (ValueError, TypeError) as err:
_LOGGER.warning(
'Could not expose %s %s value "%s" to KNX: Conversion failed: %s',
self.entity_id,
option.attribute or "state",
self.expose_attribute or "state",
value,
err,
)
@@ -267,31 +175,32 @@ class KnxExposeEntity:
return value  # type: ignore[no-any-return]

async def _async_entity_changed(self, event: Event[EventStateChangedData]) -> None:
"""Handle entity change for all options."""
"""Handle entity change."""
new_state = event.data["new_state"]
async with TaskGroup() as tg:
for option, xknx_expose in self._exposures:
expose_value = self._get_expose_value(new_state, option)
if expose_value is None:
continue
tg.create_task(self._async_set_knx_value(xknx_expose, expose_value))
if (new_value := self._get_expose_value(new_state)) is None:
return
old_state = event.data["old_state"]
# don't use default value for comparison on first state change (old_state is None)
old_value = self._get_expose_value(old_state) if old_state is not None else None
# don't send same value sequentially
if new_value != old_value:
await self._async_set_knx_value(new_value)

async def _async_set_knx_value(
self, xknx_expose: ExposeSensor, value: StateType
) -> None:
async def _async_set_knx_value(self, value: StateType) -> None:
"""Set new value on xknx ExposeSensor."""
try:
await xknx_expose.set(value, skip_unchanged=True)
await self.device.set(value)
except ConversionError as err:
_LOGGER.warning(
'Could not expose %s value "%s" to KNX: %s',
xknx_expose.name,
'Could not expose %s %s value "%s" to KNX: %s',
self.entity_id,
self.expose_attribute or "state",
value,
err,
)


class KnxExposeTime:
class KNXExposeTime:
"""Object to Expose Time/Date object to KNX bus."""

def __init__(self, xknx: XKNX, config: ConfigType) -> None:
@@ -313,11 +222,6 @@ class KnxExposeTime:
group_address=config[KNX_ADDRESS],
)

@property
def name(self) -> str:
"""Return name of the time expose object."""
return f"expose_{self.device.name}"

@callback
def async_register(self) -> None:
"""Register listener."""

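In _async_entity_changed above, one state change fans out to every configured exposure via asyncio's TaskGroup (Python 3.11+), which awaits all sends and surfaces any failure. The bare pattern, with a sleep standing in for the KNX bus write:

    import asyncio

    async def send(address: str, value: float) -> None:
        await asyncio.sleep(0)  # placeholder for the real bus write

    async def fan_out(value: float) -> None:
        # all sends run concurrently; TaskGroup waits for every task
        async with asyncio.TaskGroup() as tg:
            for address in ("1/1/1", "1/1/2", "1/1/3"):
                tg.create_task(send(address, value))

    asyncio.run(fan_out(21.5))
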
@@ -54,7 +54,7 @@ from .const import (
TELEGRAM_LOG_DEFAULT,
)
from .device import KNXInterfaceDevice
from .expose import KnxExposeEntity, KnxExposeTime
from .expose import KNXExposeSensor, KNXExposeTime
from .project import KNXProject
from .repairs import data_secure_group_key_issue_dispatcher
from .storage.config_store import KNXConfigStore
@@ -73,8 +73,8 @@ class KNXModule:
self.hass = hass
self.config_yaml = config
self.connected = False
self.yaml_exposures: list[KnxExposeEntity | KnxExposeTime] = []
self.service_exposures: dict[str, KnxExposeEntity | KnxExposeTime] = {}
self.exposures: list[KNXExposeSensor | KNXExposeTime] = []
self.service_exposures: dict[str, KNXExposeSensor | KNXExposeTime] = {}
self.entry = entry

self.project = KNXProject(hass=hass, entry=entry)

@@ -11,7 +11,7 @@
"loggers": ["xknx", "xknxproject"],
"quality_scale": "platinum",
"requirements": [
"xknx==3.14.0",
"xknx==3.13.0",
"xknxproject==3.8.2",
"knx-frontend==2025.12.30.151231"
],

@@ -193,7 +193,7 @@ async def service_exposure_register_modify(call: ServiceCall) -> None:
" for '%s' - %s"
),
group_address,
replaced_exposure.name,
replaced_exposure.device.name,
)
replaced_exposure.async_remove()
exposure = create_knx_exposure(knx_module.hass, knx_module.xknx, call.data)
@@ -201,7 +201,7 @@ async def service_exposure_register_modify(call: ServiceCall) -> None:
_LOGGER.debug(
"Service exposure_register registered exposure for '%s' - %s",
group_address,
exposure.name,
exposure.device.name,
)

@@ -256,8 +256,6 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
supported_fn=(
lambda coordinator: coordinator.device.dashboard.model_name
in (ModelName.LINEA_MINI, ModelName.LINEA_MINI_R)
and WidgetType.CM_BREW_BY_WEIGHT_DOSES
in coordinator.device.dashboard.config
),
),
LaMarzoccoNumberEntityDescription(
@@ -291,8 +289,6 @@ ENTITIES: tuple[LaMarzoccoNumberEntityDescription, ...] = (
supported_fn=(
lambda coordinator: coordinator.device.dashboard.model_name
in (ModelName.LINEA_MINI, ModelName.LINEA_MINI_R)
and WidgetType.CM_BREW_BY_WEIGHT_DOSES
in coordinator.device.dashboard.config
),
),
)

@@ -149,8 +149,6 @@ ENTITIES: tuple[LaMarzoccoSelectEntityDescription, ...] = (
supported_fn=(
lambda coordinator: coordinator.device.dashboard.model_name
in (ModelName.LINEA_MINI, ModelName.LINEA_MINI_R)
and WidgetType.CM_BREW_BY_WEIGHT_DOSES
in coordinator.device.dashboard.config
),
),
)

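The supported_fn lambdas above gate an entity on both the machine model and the presence of a dashboard widget. The same check with plain types; the names here are illustrative, not the library's API:

    SUPPORTED_MODELS = {"LINEA_MINI", "LINEA_MINI_R"}

    def supported(model_name: str, dashboard_widgets: set[str]) -> bool:
        return (
            model_name in SUPPORTED_MODELS
            and "CM_BREW_BY_WEIGHT_DOSES" in dashboard_widgets
        )

    assert supported("LINEA_MINI", {"CM_BREW_BY_WEIGHT_DOSES"})
    assert not supported("GS3", {"CM_BREW_BY_WEIGHT_DOSES"})
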
@@ -56,7 +56,7 @@ class StateConditionBase(Condition):
if TYPE_CHECKING:
assert config.target
assert config.options
self._target_selection = target.TargetSelection(config.target)
self._target = config.target
self._behavior = config.options[ATTR_BEHAVIOR]
self._state = state

@@ -80,8 +80,9 @@ class StateConditionBase(Condition):

def test_state(**kwargs: Unpack[ConditionCheckParams]) -> bool:
"""Test state condition."""
target_selection = target.TargetSelection(self._target)
targeted_entities = target.async_extract_referenced_entity_ids(
self._hass, self._target_selection, expand_group=False
self._hass, target_selection, expand_group=False
)
referenced_entity_ids = targeted_entities.referenced.union(
targeted_entities.indirectly_referenced

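One side of the hunk above rebuilds the TargetSelection from the stored target config each time the condition runs instead of once in __init__, so the targeted entities are resolved at evaluation time. A minimal sketch of that lazy-resolution idea, with a toy selection class:

    class TargetSelection:
        """Toy stand-in: resolves a target config to entity ids."""

        def __init__(self, target: dict) -> None:
            self.entity_ids = set(target.get("entity_id", []))

    target_config = {"entity_id": ["light.kitchen"]}

    def test_state() -> bool:
        # resolve at call time, so changes since setup are picked up
        selection = TargetSelection(target_config)
        return bool(selection.entity_ids)

    assert test_state()
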
@@ -42,7 +42,7 @@
},
"conditions": {
"is_off": {
"description": "Tests if one or more lights are off.",
"description": "Test if a light is off.",
"fields": {
"behavior": {
"description": "[%key:component::light::common::condition_behavior_description%]",
@@ -52,7 +52,7 @@
"name": "If a light is off"
},
"is_on": {
"description": "Tests if one or more lights are on.",
"description": "Test if a light is on.",
"fields": {
"behavior": {
"description": "[%key:component::light::common::condition_behavior_description%]",

@@ -7,6 +7,7 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"loggers": ["london_tube_status"],
"quality_scale": "legacy",
"requirements": ["london-tube-status==0.5"],
"single_config_entry": true
}

@@ -528,10 +528,7 @@ DISCOVERY_SCHEMAS = [
),
),
entity_class=MatterBinarySensor,
required_attributes=(
clusters.Thermostat.Attributes.RemoteSensing,
clusters.Thermostat.Attributes.OutdoorTemperature,
),
required_attributes=(clusters.Thermostat.Attributes.RemoteSensing,),
allow_multi=True,
),
MatterDiscoverySchema(

@@ -66,9 +66,8 @@ class MatterRangeNumberEntityDescription(
format_max_value: Callable[[float], float] = lambda x: x

# command: a custom callback to create the command to send to the device
# the callback's argument will be the converted device value from ha_to_device
# if omitted the command will just be a write_attribute command to the primary attribute
command: Callable[[int], ClusterCommand] | None = None
# the callback's argument will be the index of the selected list value
command: Callable[[int], ClusterCommand]


class MatterNumber(MatterEntity, NumberEntity):
@@ -100,15 +99,9 @@ class MatterRangeNumber(MatterEntity, NumberEntity):
async def async_set_native_value(self, value: float) -> None:
"""Update the current value."""
send_value = self.entity_description.ha_to_device(value)
if self.entity_description.command:
# custom command defined to set the new value
await self.send_device_command(
self.entity_description.command(send_value),
)
return
# regular write attribute to set the new value
await self.write_attribute(
value=send_value,
# custom command defined to set the new value
await self.send_device_command(
self.entity_description.command(send_value),
)

@callback
@@ -260,30 +253,6 @@ DISCOVERY_SCHEMAS = [
entity_class=MatterNumber,
required_attributes=(custom_clusters.EveCluster.Attributes.Altitude,),
),
MatterDiscoverySchema(
platform=Platform.NUMBER,
entity_description=MatterRangeNumberEntityDescription(
key="ThermostatOccupiedSetback",
entity_category=EntityCategory.CONFIG,
translation_key="occupied_setback",
native_unit_of_measurement=UnitOfTemperature.CELSIUS,
device_to_ha=lambda x: None if x is None else x / 10,
ha_to_device=lambda x: round(x * 10),
format_min_value=lambda x: x / 10,
format_max_value=lambda x: x / 10,
min_attribute=clusters.Thermostat.Attributes.OccupiedSetbackMin,
max_attribute=clusters.Thermostat.Attributes.OccupiedSetbackMax,
native_step=0.5,
mode=NumberMode.BOX,
),
entity_class=MatterRangeNumber,
required_attributes=(
clusters.Thermostat.Attributes.OccupiedSetback,
clusters.Thermostat.Attributes.OccupiedSetbackMin,
clusters.Thermostat.Attributes.OccupiedSetbackMax,
),
featuremap_contains=(clusters.Thermostat.Bitmaps.Feature.kSetback),
),
MatterDiscoverySchema(
platform=Platform.NUMBER,
entity_description=MatterNumberEntityDescription(

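The hunk above contrasts a required command callback with an optional one: when the description provides a factory, the converted value is sent as a cluster command, otherwise it falls back to a plain attribute write. The optional-dispatch variant in isolation, with stub transport functions (illustrative, not the integration's API):

    from collections.abc import Callable
    from dataclasses import dataclass

    async def send_command(cmd: object) -> None: ...  # stub transport
    async def write_attribute(value: int) -> None: ...  # stub transport

    @dataclass(frozen=True)
    class RangeNumberDescription:
        ha_to_device: Callable[[float], int]
        command: Callable[[int], object] | None = None  # optional factory

    async def set_native_value(desc: RangeNumberDescription, value: float) -> None:
        send_value = desc.ha_to_device(value)
        if desc.command is not None:
            await send_command(desc.command(send_value))  # custom command
            return
        await write_attribute(send_value)  # default: attribute write
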
@@ -642,7 +642,6 @@ DISCOVERY_SCHEMAS = [
list_attribute=clusters.DoorLock.Attributes.SupportedOperatingModes,
device_to_ha=DOOR_LOCK_OPERATING_MODE_MAP.get,
ha_to_device=DOOR_LOCK_OPERATING_MODE_MAP_REVERSE.get,
entity_category=EntityCategory.CONFIG,
),
entity_class=MatterDoorLockOperatingModeSelectEntity,
required_attributes=(

@@ -217,9 +217,6 @@
"led_indicator_intensity_on": {
"name": "LED on intensity"
},
"occupied_setback": {
"name": "Occupied setback"
},
"off_transition_time": {
"name": "Off transition time"
},

@@ -4,70 +4,45 @@ from __future__ import annotations

import asyncio
from datetime import timedelta
from http import HTTPStatus
import logging
from typing import Any

from aiohttp import ClientConnectionError, ClientResponseError
from pymelcloud import get_devices
from pymelcloud import Device, get_devices
from pymelcloud.atw_device import Zone

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_TOKEN, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import device_registry as dr
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import UpdateFailed
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
from homeassistant.util import Throttle

from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
from .const import DOMAIN

_LOGGER = logging.getLogger(__name__)

MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=15)

PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.WATER_HEATER]

type MelCloudConfigEntry = ConfigEntry[dict[str, list[MelCloudDevice]]]


async def async_setup_entry(hass: HomeAssistant, entry: MelCloudConfigEntry) -> bool:
"""Establish connection with MELCloud."""
conf = entry.data
try:
async with asyncio.timeout(10):
all_devices = await get_devices(
token=entry.data[CONF_TOKEN],
session=async_get_clientsession(hass),
conf_update_interval=timedelta(minutes=30),
device_set_debounce=timedelta(seconds=2),
)
mel_devices = await mel_devices_setup(hass, conf[CONF_TOKEN])
except ClientResponseError as ex:
if ex.status in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN):
if isinstance(ex, ClientResponseError) and ex.code == 401:
raise ConfigEntryAuthFailed from ex
if ex.status == HTTPStatus.TOO_MANY_REQUESTS:
raise UpdateFailed(
"MELCloud rate limit exceeded. Your account may be temporarily blocked"
) from ex
raise UpdateFailed(f"Error communicating with MELCloud: {ex}") from ex
raise ConfigEntryNotReady from ex
except (TimeoutError, ClientConnectionError) as ex:
raise UpdateFailed(f"Error communicating with MELCloud: {ex}") from ex
raise ConfigEntryNotReady from ex

# Create per-device coordinators
coordinators: dict[str, list[MelCloudDeviceUpdateCoordinator]] = {}
device_registry = dr.async_get(hass)
for device_type, devices in all_devices.items():
# Build coordinators for this device_type
coordinators[device_type] = [
MelCloudDeviceUpdateCoordinator(hass, device, entry) for device in devices
]

# Perform initial refreshes concurrently
await asyncio.gather(
*(
coordinator.async_config_entry_first_refresh()
for coordinator in coordinators[device_type]
)
)

# Register parent devices so zone entities can reference via_device
for coordinator in coordinators[device_type]:
device_registry.async_get_or_create(
config_entry_id=entry.entry_id,
**coordinator.device_info,
)

entry.runtime_data = coordinators
entry.runtime_data = mel_devices
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True

@@ -75,3 +50,90 @@ async def async_setup_entry(hass: HomeAssistant, entry: MelCloudConfigEntry) ->
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload a config entry."""
return await hass.config_entries.async_unload_platforms(config_entry, PLATFORMS)


class MelCloudDevice:
"""MELCloud Device instance."""

def __init__(self, device: Device) -> None:
"""Construct a device wrapper."""
self.device = device
self.name = device.name
self._available = True

@Throttle(MIN_TIME_BETWEEN_UPDATES)
async def async_update(self, **kwargs):
"""Pull the latest data from MELCloud."""
try:
await self.device.update()
self._available = True
except ClientConnectionError:
_LOGGER.warning("Connection failed for %s", self.name)
self._available = False

async def async_set(self, properties: dict[str, Any]):
"""Write state changes to the MELCloud API."""
try:
await self.device.set(properties)
self._available = True
except ClientConnectionError:
_LOGGER.warning("Connection failed for %s", self.name)
self._available = False

@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._available

@property
def device_id(self):
"""Return device ID."""
return self.device.device_id

@property
def building_id(self):
"""Return building ID of the device."""
return self.device.building_id

@property
def device_info(self) -> DeviceInfo:
"""Return a device description for device registry."""
model = None
if (unit_infos := self.device.units) is not None:
model = ", ".join([x["model"] for x in unit_infos if x["model"]])
return DeviceInfo(
connections={(CONNECTION_NETWORK_MAC, self.device.mac)},
identifiers={(DOMAIN, f"{self.device.mac}-{self.device.serial}")},
manufacturer="Mitsubishi Electric",
model=model,
name=self.name,
)

def zone_device_info(self, zone: Zone) -> DeviceInfo:
"""Return a zone device description for device registry."""
dev = self.device
return DeviceInfo(
identifiers={(DOMAIN, f"{dev.mac}-{dev.serial}-{zone.zone_index}")},
manufacturer="Mitsubishi Electric",
model="ATW zone device",
name=f"{self.name} {zone.name}",
via_device=(DOMAIN, f"{dev.mac}-{dev.serial}"),
)


async def mel_devices_setup(
hass: HomeAssistant, token: str
) -> dict[str, list[MelCloudDevice]]:
"""Query connected devices from MELCloud."""
session = async_get_clientsession(hass)
async with asyncio.timeout(10):
all_devices = await get_devices(
token,
session,
conf_update_interval=timedelta(minutes=30),
device_set_debounce=timedelta(seconds=2),
)
wrapped_devices: dict[str, list[MelCloudDevice]] = {}
for device_type, devices in all_devices.items():
wrapped_devices[device_type] = [MelCloudDevice(device) for device in devices]
return wrapped_devices

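One side of the setup above builds a coordinator per device and runs all first refreshes concurrently with asyncio.gather instead of polling serially. The shape of that pattern with stand-in classes (not the integration's real coordinator API):

    import asyncio

    class FakeDevice:
        async def update(self) -> None:
            await asyncio.sleep(0)  # stand-in for one cloud poll

    class DeviceCoordinator:
        """Toy per-device coordinator."""

        def __init__(self, device: FakeDevice) -> None:
            self.device = device

        async def async_first_refresh(self) -> None:
            await self.device.update()

    async def setup(devices: list[FakeDevice]) -> list[DeviceCoordinator]:
        coordinators = [DeviceCoordinator(d) for d in devices]
        # refresh every device at once rather than one after another
        await asyncio.gather(*(c.async_first_refresh() for c in coordinators))
        return coordinators

    coordinators = asyncio.run(setup([FakeDevice(), FakeDevice()]))
    assert len(coordinators) == 2
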
@@ -2,6 +2,7 @@

from __future__ import annotations

from datetime import timedelta
from typing import Any, cast

from pymelcloud import DEVICE_TYPE_ATA, DEVICE_TYPE_ATW, AtaDevice, AtwDevice
@@ -28,6 +29,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv, entity_platform
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import MelCloudConfigEntry, MelCloudDevice
from .const import (
ATTR_STATUS,
ATTR_VANE_HORIZONTAL,
@@ -38,8 +40,9 @@ from .const import (
SERVICE_SET_VANE_HORIZONTAL,
SERVICE_SET_VANE_VERTICAL,
)
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
from .entity import MelCloudEntity

SCAN_INTERVAL = timedelta(seconds=60)


ATA_HVAC_MODE_LOOKUP = {
ata.OPERATION_MODE_HEAT: HVACMode.HEAT,
@@ -71,24 +74,27 @@ ATW_ZONE_HVAC_ACTION_LOOKUP = {


async def async_setup_entry(
_hass: HomeAssistant,
hass: HomeAssistant,
entry: MelCloudConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up MelCloud device climate based on config_entry."""
coordinators = entry.runtime_data
mel_devices = entry.runtime_data
entities: list[AtaDeviceClimate | AtwDeviceZoneClimate] = [
AtaDeviceClimate(coordinator, coordinator.device)
for coordinator in coordinators.get(DEVICE_TYPE_ATA, [])
AtaDeviceClimate(mel_device, mel_device.device)
for mel_device in mel_devices[DEVICE_TYPE_ATA]
]
entities.extend(
[
AtwDeviceZoneClimate(coordinator, coordinator.device, zone)
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
for zone in coordinator.device.zones
AtwDeviceZoneClimate(mel_device, mel_device.device, zone)
for mel_device in mel_devices[DEVICE_TYPE_ATW]
for zone in mel_device.device.zones
]
)
async_add_entities(entities)
async_add_entities(
entities,
True,
)

platform = entity_platform.async_get_current_platform()
platform.async_register_entity_service(
@@ -103,19 +109,21 @@ async def async_setup_entry(
)


class MelCloudClimate(MelCloudEntity, ClimateEntity):
class MelCloudClimate(ClimateEntity):
"""Base climate device."""

_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_has_entity_name = True
_attr_name = None

def __init__(
self,
coordinator: MelCloudDeviceUpdateCoordinator,
) -> None:
def __init__(self, device: MelCloudDevice) -> None:
"""Initialize the climate."""
super().__init__(coordinator)
self._base_device = self.coordinator.device
self.api = device
self._base_device = self.api.device

async def async_update(self) -> None:
"""Update state from MELCloud."""
await self.api.async_update()

@property
def target_temperature_step(self) -> float | None:
@@ -134,29 +142,26 @@ class AtaDeviceClimate(MelCloudClimate):
| ClimateEntityFeature.TURN_ON
)

def __init__(
self,
coordinator: MelCloudDeviceUpdateCoordinator,
ata_device: AtaDevice,
) -> None:
def __init__(self, device: MelCloudDevice, ata_device: AtaDevice) -> None:
"""Initialize the climate."""
super().__init__(coordinator)
super().__init__(device)
self._device = ata_device

self._attr_unique_id = (
f"{self.coordinator.device.serial}-{self.coordinator.device.mac}"
)
self._attr_device_info = self.coordinator.device_info
self._attr_unique_id = f"{self.api.device.serial}-{self.api.device.mac}"
self._attr_device_info = self.api.device_info

# Add horizontal swing if device supports it
async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()

# We can only check for vane_horizontal once we fetch the device data from the cloud
if self._device.vane_horizontal:
self._attr_supported_features |= ClimateEntityFeature.SWING_HORIZONTAL_MODE

@property
def extra_state_attributes(self) -> dict[str, Any] | None:
"""Return the optional state attributes with device specific additions."""
attr: dict[str, Any] = {}
attr.update(self.coordinator.extra_attributes)
attr = {}

if vane_horizontal := self._device.vane_horizontal:
attr.update(
@@ -203,7 +208,7 @@ class AtaDeviceClimate(MelCloudClimate):
"""Set new target hvac mode."""
set_dict: dict[str, Any] = {}
self._apply_set_hvac_mode(hvac_mode, set_dict)
await self.coordinator.async_set(set_dict)
await self._device.set(set_dict)

@property
def hvac_modes(self) -> list[HVACMode]:
@@ -236,7 +241,7 @@ class AtaDeviceClimate(MelCloudClimate):
set_dict["target_temperature"] = kwargs.get(ATTR_TEMPERATURE)

if set_dict:
await self.coordinator.async_set(set_dict)
await self._device.set(set_dict)

@property
def fan_mode(self) -> str | None:
@@ -245,7 +250,7 @@ class AtaDeviceClimate(MelCloudClimate):

async def async_set_fan_mode(self, fan_mode: str) -> None:
"""Set new target fan mode."""
await self.coordinator.async_set({"fan_speed": fan_mode})
await self._device.set({"fan_speed": fan_mode})

@property
def fan_modes(self) -> list[str] | None:
@@ -259,7 +264,7 @@ class AtaDeviceClimate(MelCloudClimate):
f"Invalid horizontal vane position {position}. Valid positions:"
f" [{self._device.vane_horizontal_positions}]."
)
await self.coordinator.async_set({ata.PROPERTY_VANE_HORIZONTAL: position})
await self._device.set({ata.PROPERTY_VANE_HORIZONTAL: position})

async def async_set_vane_vertical(self, position: str) -> None:
"""Set vertical vane position."""
@@ -268,7 +273,7 @@ class AtaDeviceClimate(MelCloudClimate):
f"Invalid vertical vane position {position}. Valid positions:"
f" [{self._device.vane_vertical_positions}]."
)
await self.coordinator.async_set({ata.PROPERTY_VANE_VERTICAL: position})
await self._device.set({ata.PROPERTY_VANE_VERTICAL: position})

@property
def swing_mode(self) -> str | None:
@@ -300,11 +305,11 @@ class AtaDeviceClimate(MelCloudClimate):

async def async_turn_on(self) -> None:
"""Turn the entity on."""
await self.coordinator.async_set({"power": True})
await self._device.set({"power": True})

async def async_turn_off(self) -> None:
"""Turn the entity off."""
await self.coordinator.async_set({"power": False})
await self._device.set({"power": False})

@property
def min_temp(self) -> float:
@@ -333,18 +338,15 @@ class AtwDeviceZoneClimate(MelCloudClimate):
_attr_supported_features = ClimateEntityFeature.TARGET_TEMPERATURE

def __init__(
self,
coordinator: MelCloudDeviceUpdateCoordinator,
atw_device: AtwDevice,
atw_zone: Zone,
self, device: MelCloudDevice, atw_device: AtwDevice, atw_zone: Zone
) -> None:
"""Initialize the climate."""
super().__init__(coordinator)
super().__init__(device)
self._device = atw_device
self._zone = atw_zone

self._attr_unique_id = f"{self.coordinator.device.serial}-{atw_zone.zone_index}"
self._attr_device_info = self.coordinator.zone_device_info(atw_zone)
self._attr_unique_id = f"{self.api.device.serial}-{atw_zone.zone_index}"
self._attr_device_info = self.api.zone_device_info(atw_zone)

@property
def extra_state_attributes(self) -> dict[str, Any]:
@@ -358,16 +360,15 @@ class AtwDeviceZoneClimate(MelCloudClimate):
@property
def hvac_mode(self) -> HVACMode:
"""Return hvac operation ie. heat, cool mode."""
# Use zone status (heat/cool/idle) not operation_mode (heat-thermostat/etc.)
status = self._zone.status
if not self._device.power or status is None:
mode = self._zone.operation_mode
if not self._device.power or mode is None:
return HVACMode.OFF
return ATW_ZONE_HVAC_MODE_LOOKUP.get(status, HVACMode.OFF)
return ATW_ZONE_HVAC_MODE_LOOKUP.get(mode, HVACMode.OFF)

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
if hvac_mode == HVACMode.OFF:
await self.coordinator.async_set({"power": False})
await self._device.set({"power": False})
return

operation_mode = ATW_ZONE_HVAC_MODE_REVERSE_LOOKUP.get(hvac_mode)
@@ -380,7 +381,7 @@ class AtwDeviceZoneClimate(MelCloudClimate):
props = {PROPERTY_ZONE_2_OPERATION_MODE: operation_mode}
if self.hvac_mode == HVACMode.OFF:
props["power"] = True
await self.coordinator.async_set(props)
await self._device.set(props)

@property
def hvac_modes(self) -> list[HVACMode]:
@@ -409,4 +410,3 @@ class AtwDeviceZoneClimate(MelCloudClimate):
await self._zone.set_target_temperature(
kwargs.get(ATTR_TEMPERATURE, self.target_temperature)
)
await self.coordinator.async_request_refresh()

@@ -5,6 +5,7 @@ from __future__ import annotations
|
||||
import asyncio
|
||||
from collections.abc import Mapping
|
||||
from http import HTTPStatus
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientError, ClientResponseError
|
||||
@@ -17,6 +18,8 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
"""Handle a config flow."""
|
||||
@@ -34,7 +37,8 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
async def _create_client(
|
||||
self,
|
||||
username: str,
|
||||
password: str,
|
||||
*,
|
||||
password: str | None = None,
|
||||
token: str | None = None,
|
||||
) -> ConfigFlowResult:
|
||||
"""Create client."""
|
||||
@@ -42,13 +46,13 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
async with asyncio.timeout(10):
|
||||
if (acquired_token := token) is None:
|
||||
acquired_token = await pymelcloud.login(
|
||||
email=username,
|
||||
password=password,
|
||||
session=async_get_clientsession(self.hass),
|
||||
username,
|
||||
password,
|
||||
async_get_clientsession(self.hass),
|
||||
)
|
||||
await pymelcloud.get_devices(
|
||||
token=acquired_token,
|
||||
session=async_get_clientsession(self.hass),
|
||||
acquired_token,
|
||||
async_get_clientsession(self.hass),
|
||||
)
|
||||
except ClientResponseError as err:
|
||||
if err.status in (HTTPStatus.UNAUTHORIZED, HTTPStatus.FORBIDDEN):
|
||||
@@ -56,10 +60,6 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except (TimeoutError, ClientError):
|
||||
return self.async_abort(reason="cannot_connect")
|
||||
except AttributeError:
|
||||
# python-melcloud library bug: login() raises AttributeError on invalid
|
||||
# credentials when API response doesn't contain expected "LoginData" key
|
||||
return self.async_abort(reason="invalid_auth")
|
||||
|
||||
return await self._create_entry(username, acquired_token)
|
||||
|
||||
@@ -74,9 +74,8 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
{vol.Required(CONF_USERNAME): str, vol.Required(CONF_PASSWORD): str}
|
||||
),
|
||||
)
|
||||
return await self._create_client(
|
||||
username=user_input[CONF_USERNAME], password=user_input[CONF_PASSWORD]
|
||||
)
|
||||
username = user_input[CONF_USERNAME]
|
||||
return await self._create_client(username, password=user_input[CONF_PASSWORD])
|
||||
|
||||
async def async_step_reauth(
|
||||
self, entry_data: Mapping[str, Any]
|
||||
@@ -115,9 +114,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
try:
|
||||
async with asyncio.timeout(10):
|
||||
acquired_token = await pymelcloud.login(
|
||||
email=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=async_get_clientsession(self.hass),
|
||||
user_input[CONF_USERNAME],
|
||||
user_input[CONF_PASSWORD],
|
||||
async_get_clientsession(self.hass),
|
||||
)
|
||||
except (ClientResponseError, AttributeError) as err:
|
||||
if (
|
||||
@@ -131,7 +130,10 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
errors["base"] = "invalid_auth"
|
||||
else:
|
||||
errors["base"] = "cannot_connect"
|
||||
except (TimeoutError, ClientError):
|
||||
except (
|
||||
TimeoutError,
|
||||
ClientError,
|
||||
):
|
||||
errors["base"] = "cannot_connect"
|
||||
|
||||
return acquired_token, errors
|
||||
@@ -149,9 +151,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN):
|
||||
try:
|
||||
async with asyncio.timeout(10):
|
||||
acquired_token = await pymelcloud.login(
|
||||
email=user_input[CONF_USERNAME],
|
||||
password=user_input[CONF_PASSWORD],
|
||||
session=async_get_clientsession(self.hass),
|
||||
user_input[CONF_USERNAME],
|
||||
user_input[CONF_PASSWORD],
|
||||
async_get_clientsession(self.hass),
|
||||
)
|
||||
except (ClientResponseError, AttributeError) as err:
|
||||
if (
|
||||
|
||||
@@ -1,193 +0,0 @@
|
||||
"""DataUpdateCoordinator for the MELCloud integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import timedelta
|
||||
import logging
|
||||
from typing import Any
|
||||
|
||||
from aiohttp import ClientConnectionError, ClientResponseError
|
||||
from pymelcloud import Device
|
||||
from pymelcloud.atw_device import Zone
|
||||
|
||||
from homeassistant.config_entries import ConfigEntry
|
||||
from homeassistant.core import HomeAssistant
|
||||
from homeassistant.exceptions import ConfigEntryAuthFailed
|
||||
from homeassistant.helpers.debounce import Debouncer
|
||||
from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
|
||||
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
|
||||
|
||||
from .const import DOMAIN
|
||||
|
||||
_LOGGER = logging.getLogger(__name__)
|
||||
|
||||
# Delay before refreshing after a state change to allow device to process
|
||||
# and avoid race conditions with rapid sequential changes
|
||||
REQUEST_REFRESH_DELAY = 1.5
|
||||
|
||||
# Default update interval in minutes (matches upstream Throttle value)
|
||||
DEFAULT_UPDATE_INTERVAL = 15
|
||||
|
||||
# Retry interval in seconds for transient failures
|
||||
RETRY_INTERVAL_SECONDS = 30
|
||||
|
||||
# Number of consecutive failures before marking device unavailable
|
||||
MAX_CONSECUTIVE_FAILURES = 3
|
||||
|
||||
|
||||
class MelCloudDeviceUpdateCoordinator(DataUpdateCoordinator[None]):
|
||||
"""Per-device coordinator for MELCloud data updates."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
hass: HomeAssistant,
|
||||
device: Device,
|
||||
config_entry: ConfigEntry,
|
||||
) -> None:
|
||||
"""Initialize the per-device coordinator."""
|
||||
self.device = device
|
||||
self.device_available = True
|
||||
self._consecutive_failures = 0
|
||||
|
||||
super().__init__(
|
||||
hass,
|
||||
_LOGGER,
|
||||
config_entry=config_entry,
|
||||
name=f"{DOMAIN}_{device.name}",
|
||||
update_interval=timedelta(minutes=DEFAULT_UPDATE_INTERVAL),
|
||||
always_update=True,
|
||||
request_refresh_debouncer=Debouncer(
|
||||
hass,
|
||||
_LOGGER,
|
||||
cooldown=REQUEST_REFRESH_DELAY,
|
||||
immediate=False,
|
||||
),
|
||||
)
|
||||
|
||||
@property
|
||||
def extra_attributes(self) -> dict[str, Any]:
|
||||
"""Return extra device attributes."""
|
||||
data: dict[str, Any] = {
|
||||
"device_id": self.device.device_id,
|
||||
"serial": self.device.serial,
|
||||
"mac": self.device.mac,
|
||||
}
|
||||
if (unit_infos := self.device.units) is not None:
|
||||
for i, unit in enumerate(unit_infos[:2]):
|
||||
data[f"unit_{i}_model"] = unit.get("model")
|
||||
data[f"unit_{i}_serial"] = unit.get("serial")
|
||||
return data
|
||||
|
||||
@property
|
||||
def device_id(self) -> str:
|
||||
"""Return device ID."""
|
||||
return self.device.device_id
|
||||
|
||||
@property
|
||||
def building_id(self) -> str:
|
||||
"""Return building ID of the device."""
|
||||
return self.device.building_id
|
||||
|
||||
@property
|
||||
def device_info(self) -> DeviceInfo:
|
||||
"""Return a device description for device registry."""
|
||||
model = None
|
||||
if (unit_infos := self.device.units) is not None:
|
||||
model = ", ".join([x["model"] for x in unit_infos if x["model"]])
|
||||
return DeviceInfo(
|
||||
connections={(CONNECTION_NETWORK_MAC, self.device.mac)},
|
||||
identifiers={(DOMAIN, f"{self.device.mac}-{self.device.serial}")},
|
||||
manufacturer="Mitsubishi Electric",
|
||||
model=model,
|
||||
name=self.device.name,
|
||||
)
|
||||
|
||||
def zone_device_info(self, zone: Zone) -> DeviceInfo:
|
||||
"""Return a zone device description for device registry."""
|
||||
dev = self.device
|
||||
return DeviceInfo(
|
||||
identifiers={(DOMAIN, f"{dev.mac}-{dev.serial}-{zone.zone_index}")},
|
||||
manufacturer="Mitsubishi Electric",
|
||||
model="ATW zone device",
|
||||
name=f"{self.device.name} {zone.name}",
|
||||
via_device=(DOMAIN, f"{dev.mac}-{dev.serial}"),
|
||||
)
|
||||
|
||||
async def _async_update_data(self) -> None:
|
||||
"""Fetch data for this specific device from MELCloud."""
|
||||
try:
|
||||
await self.device.update()
|
||||
# Success - reset failure counter and restore normal interval
|
||||
if self._consecutive_failures > 0:
|
||||
_LOGGER.info(
|
||||
"Connection restored for %s after %d failed attempt(s)",
|
||||
self.device.name,
|
||||
self._consecutive_failures,
|
||||
)
|
||||
self._consecutive_failures = 0
|
||||
self.update_interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)
|
||||
self.device_available = True
|
||||
except ClientResponseError as ex:
|
||||
if ex.status in (401, 403):
|
||||
raise ConfigEntryAuthFailed from ex
|
||||
if ex.status == 429:
|
||||
_LOGGER.error(
|
||||
"MELCloud rate limit exceeded for %s. Your account may be "
|
||||
"temporarily blocked",
|
||||
self.device.name,
|
||||
)
|
||||
# Rate limit - mark unavailable immediately
|
||||
self.device_available = False
|
||||
raise UpdateFailed(
|
||||
f"Rate limit exceeded for {self.device.name}"
|
||||
) from ex
|
||||
# Other HTTP errors - use retry logic
|
||||
self._handle_failure(f"Error updating {self.device.name}: {ex}", ex)
|
||||
except ClientConnectionError as ex:
|
||||
self._handle_failure(f"Connection failed for {self.device.name}: {ex}", ex)
|
||||
|
||||
def _handle_failure(self, message: str, exception: Exception | None = None) -> None:
|
||||
"""Handle a connection failure with retry logic.
|
||||
|
||||
For transient failures, entities remain available with their last known
values for the first MAX_CONSECUTIVE_FAILURES - 1 attempts. During retries,
the update interval is shortened to RETRY_INTERVAL_SECONDS for faster recovery.
Once the threshold is reached, entities are marked unavailable.
"""
|
||||
self._consecutive_failures += 1
|
||||
|
||||
if self._consecutive_failures < MAX_CONSECUTIVE_FAILURES:
|
||||
# Keep entities available with cached data, use shorter retry interval
|
||||
_LOGGER.warning(
|
||||
"%s (attempt %d/%d, retrying in %ds)",
|
||||
message,
|
||||
self._consecutive_failures,
|
||||
MAX_CONSECUTIVE_FAILURES,
|
||||
RETRY_INTERVAL_SECONDS,
|
||||
)
|
||||
self.update_interval = timedelta(seconds=RETRY_INTERVAL_SECONDS)
|
||||
else:
|
||||
# Threshold reached - mark unavailable and restore normal interval
|
||||
_LOGGER.warning(
|
||||
"%s (attempt %d/%d, marking unavailable)",
|
||||
message,
|
||||
self._consecutive_failures,
|
||||
MAX_CONSECUTIVE_FAILURES,
|
||||
)
|
||||
self.device_available = False
|
||||
self.update_interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)
|
||||
raise UpdateFailed(message) from exception
|
||||
|
||||
async def async_set(self, properties: dict[str, Any]) -> None:
|
||||
"""Write state changes to the MELCloud API."""
|
||||
try:
|
||||
await self.device.set(properties)
|
||||
self.device_available = True
|
||||
except ClientConnectionError:
|
||||
_LOGGER.warning("Connection failed for %s", self.device.name)
|
||||
self.device_available = False
|
||||
|
||||
await self.async_request_refresh()
|
||||
|
||||
|
||||
type MelCloudConfigEntry = ConfigEntry[dict[str, list[MelCloudDeviceUpdateCoordinator]]]
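
The failure-handling contract of the coordinator being removed above is easiest to see in isolation: entities keep serving cached state for the first MAX_CONSECUTIVE_FAILURES - 1 misses while polling drops to RETRY_INTERVAL_SECONDS, then flip to unavailable and the normal cadence is restored. A minimal standalone sketch of that bookkeeping (plain Python, no Home Assistant imports; the RetryTracker class is hypothetical and only mirrors the logic shown in the diff):

```python
from datetime import timedelta

# Constants mirror the coordinator above.
MAX_CONSECUTIVE_FAILURES = 3
RETRY_INTERVAL_SECONDS = 30
DEFAULT_UPDATE_INTERVAL = 15  # minutes


class RetryTracker:
    """Hypothetical standalone model of the coordinator's failure bookkeeping."""

    def __init__(self) -> None:
        self.failures = 0
        self.available = True
        self.interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)

    def record_failure(self) -> None:
        self.failures += 1
        if self.failures < MAX_CONSECUTIVE_FAILURES:
            # Below the threshold: keep serving cached data, poll again sooner.
            self.interval = timedelta(seconds=RETRY_INTERVAL_SECONDS)
        else:
            # Threshold reached: mark unavailable, return to the normal cadence.
            self.available = False
            self.interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)

    def record_success(self) -> None:
        # Any success resets the counter and restores availability.
        self.failures = 0
        self.available = True
        self.interval = timedelta(minutes=DEFAULT_UPDATE_INTERVAL)


tracker = RetryTracker()
for _ in range(3):
    tracker.record_failure()
assert tracker.available is False
tracker.record_success()
assert tracker.available is True
```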
@@ -9,7 +9,7 @@ from homeassistant.const import CONF_TOKEN, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er

from .coordinator import MelCloudConfigEntry
from . import MelCloudConfigEntry

TO_REDACT = {
CONF_USERNAME,

@@ -1,18 +0,0 @@
"""Base entity for MELCloud integration."""

from __future__ import annotations

from homeassistant.helpers.update_coordinator import CoordinatorEntity

from .coordinator import MelCloudDeviceUpdateCoordinator


class MelCloudEntity(CoordinatorEntity[MelCloudDeviceUpdateCoordinator]):
"""Base class for MELCloud entities."""

_attr_has_entity_name = True

@property
def available(self) -> bool:
"""Return True if entity is available."""
return super().available and self.coordinator.device_available
@@ -6,6 +6,6 @@
"documentation": "https://www.home-assistant.io/integrations/melcloud",
"integration_type": "device",
"iot_class": "cloud_polling",
"loggers": ["melcloud"],
"loggers": ["pymelcloud"],
"requirements": ["python-melcloud==0.1.2"]
}

@@ -19,8 +19,7 @@ from homeassistant.const import UnitOfEnergy, UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
from .entity import MelCloudEntity
from . import MelCloudConfigEntry, MelCloudDevice


@dataclasses.dataclass(frozen=True, kw_only=True)
@@ -112,67 +111,70 @@ ATW_ZONE_SENSORS: tuple[MelcloudSensorEntityDescription, ...] = (


async def async_setup_entry(
_hass: HomeAssistant,
hass: HomeAssistant,
entry: MelCloudConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up MELCloud device sensors based on config_entry."""
coordinators = entry.runtime_data
mel_devices = entry.runtime_data

entities: list[MelDeviceSensor] = [
MelDeviceSensor(coordinator, description)
MelDeviceSensor(mel_device, description)
for description in ATA_SENSORS
for coordinator in coordinators.get(DEVICE_TYPE_ATA, [])
if description.enabled(coordinator)
for mel_device in mel_devices[DEVICE_TYPE_ATA]
if description.enabled(mel_device)
] + [
MelDeviceSensor(coordinator, description)
MelDeviceSensor(mel_device, description)
for description in ATW_SENSORS
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
if description.enabled(coordinator)
for mel_device in mel_devices[DEVICE_TYPE_ATW]
if description.enabled(mel_device)
]
entities.extend(
[
AtwZoneSensor(coordinator, zone, description)
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
for zone in coordinator.device.zones
AtwZoneSensor(mel_device, zone, description)
for mel_device in mel_devices[DEVICE_TYPE_ATW]
for zone in mel_device.device.zones
for description in ATW_ZONE_SENSORS
if description.enabled(zone)
]
)
async_add_entities(entities)
async_add_entities(entities, True)


class MelDeviceSensor(MelCloudEntity, SensorEntity):
class MelDeviceSensor(SensorEntity):
"""Representation of a Sensor."""

entity_description: MelcloudSensorEntityDescription
_attr_has_entity_name = True

def __init__(
self,
coordinator: MelCloudDeviceUpdateCoordinator,
api: MelCloudDevice,
description: MelcloudSensorEntityDescription,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator)
self._api = api
self.entity_description = description

self._attr_unique_id = (
f"{coordinator.device.serial}-{coordinator.device.mac}-{description.key}"
)
self._attr_device_info = coordinator.device_info
self._attr_unique_id = f"{api.device.serial}-{api.device.mac}-{description.key}"
self._attr_device_info = api.device_info

@property
def native_value(self) -> float | None:
"""Return the state of the sensor."""
return self.entity_description.value_fn(self.coordinator)
return self.entity_description.value_fn(self._api)

async def async_update(self) -> None:
"""Retrieve latest state."""
await self._api.async_update()


class AtwZoneSensor(MelDeviceSensor):
"""Air-to-Water zone sensor."""
|
||||
"""Air-to-Air device sensor."""
|
||||

def __init__(
self,
coordinator: MelCloudDeviceUpdateCoordinator,
api: MelCloudDevice,
zone: Zone,
description: MelcloudSensorEntityDescription,
) -> None:
@@ -182,9 +184,9 @@ class AtwZoneSensor(MelDeviceSensor):
description,
key=f"{description.key}-zone-{zone.zone_index}",
)
super().__init__(coordinator, description)
super().__init__(api, description)

self._attr_device_info = coordinator.zone_device_info(zone)
self._attr_device_info = api.zone_device_info(zone)
self._zone = zone

@property

@@ -43,9 +43,6 @@
},
"entity": {
"sensor": {
"energy_consumed": {
"name": "Energy consumed"
},
"flow_temperature": {
"name": "Flow temperature"
},

@@ -21,27 +21,27 @@ from homeassistant.const import UnitOfTemperature
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback

from . import MelCloudConfigEntry, MelCloudDevice
from .const import ATTR_STATUS
from .coordinator import MelCloudConfigEntry, MelCloudDeviceUpdateCoordinator
from .entity import MelCloudEntity


async def async_setup_entry(
_hass: HomeAssistant,
hass: HomeAssistant,
entry: MelCloudConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up MelCloud device climate based on config_entry."""
coordinators = entry.runtime_data
mel_devices = entry.runtime_data
async_add_entities(
[
AtwWaterHeater(coordinator, coordinator.device)
for coordinator in coordinators.get(DEVICE_TYPE_ATW, [])
]
AtwWaterHeater(mel_device, mel_device.device)
for mel_device in mel_devices[DEVICE_TYPE_ATW]
],
True,
)


class AtwWaterHeater(MelCloudEntity, WaterHeaterEntity):
class AtwWaterHeater(WaterHeaterEntity):
"""Air-to-Water water heater."""

_attr_supported_features = (
@@ -49,26 +49,27 @@ class AtwWaterHeater(MelCloudEntity, WaterHeaterEntity):
| WaterHeaterEntityFeature.ON_OFF
| WaterHeaterEntityFeature.OPERATION_MODE
)
_attr_has_entity_name = True
_attr_name = None

def __init__(
self,
coordinator: MelCloudDeviceUpdateCoordinator,
device: AtwDevice,
) -> None:
def __init__(self, api: MelCloudDevice, device: AtwDevice) -> None:
"""Initialize water heater device."""
super().__init__(coordinator)
self._api = api
self._device = device
self._attr_unique_id = coordinator.device.serial
self._attr_device_info = coordinator.device_info
self._attr_unique_id = api.device.serial
self._attr_device_info = api.device_info

async def async_turn_on(self, **_kwargs: Any) -> None:
async def async_update(self) -> None:
"""Update state from MELCloud."""
await self._api.async_update()

async def async_turn_on(self, **kwargs: Any) -> None:
"""Turn the entity on."""
await self.coordinator.async_set({PROPERTY_POWER: True})
await self._device.set({PROPERTY_POWER: True})

async def async_turn_off(self, **_kwargs: Any) -> None:
async def async_turn_off(self, **kwargs: Any) -> None:
"""Turn the entity off."""
await self.coordinator.async_set({PROPERTY_POWER: False})
await self._device.set({PROPERTY_POWER: False})

@property
def extra_state_attributes(self) -> dict[str, Any] | None:
@@ -102,7 +103,7 @@ class AtwWaterHeater(MelCloudEntity, WaterHeaterEntity):

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature."""
await self.coordinator.async_set(
await self._device.set(
{
PROPERTY_TARGET_TANK_TEMPERATURE: kwargs.get(
"temperature", self.target_temperature
@@ -112,7 +113,7 @@ class AtwWaterHeater(MelCloudEntity, WaterHeaterEntity):

async def async_set_operation_mode(self, operation_mode: str) -> None:
"""Set new target operation mode."""
await self.coordinator.async_set({PROPERTY_OPERATION_MODE: operation_mode})
await self._device.set({PROPERTY_OPERATION_MODE: operation_mode})

@property
def min_temp(self) -> float:

@@ -7,7 +7,6 @@ from mill_local import OperationMode
import voluptuous as vol

from homeassistant.components.climate import (
ATTR_HVAC_MODE,
ClimateEntity,
ClimateEntityFeature,
HVACAction,
@@ -112,16 +111,13 @@ class MillHeater(MillBaseEntity, ClimateEntity):
super().__init__(coordinator, device)

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature and optionally HVAC mode."""
"""Set new target temperature."""
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
return
await self.coordinator.mill_data_connection.set_heater_temp(
self._id, float(temperature)
)
if (hvac_mode := kwargs.get(ATTR_HVAC_MODE)) is not None:
await self.async_handle_set_hvac_mode_service(hvac_mode)
else:
await self.coordinator.async_request_refresh()
await self.coordinator.async_request_refresh()

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
@@ -129,11 +125,12 @@ class MillHeater(MillBaseEntity, ClimateEntity):
await self.coordinator.mill_data_connection.heater_control(
self._id, power_status=True
)
await self.coordinator.async_request_refresh()
elif hvac_mode == HVACMode.OFF:
await self.coordinator.mill_data_connection.heater_control(
self._id, power_status=False
)
await self.coordinator.async_request_refresh()
await self.coordinator.async_request_refresh()

@callback
def _update_attr(self, device: mill.Heater) -> None:
@@ -192,26 +189,25 @@ class LocalMillHeater(CoordinatorEntity[MillDataUpdateCoordinator], ClimateEntit
self._update_attr()

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set new target temperature and optionally HVAC mode."""
"""Set new target temperature."""
if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None:
return
await self.coordinator.mill_data_connection.set_target_temperature(
float(temperature)
)
if (hvac_mode := kwargs.get(ATTR_HVAC_MODE)) is not None:
await self.async_handle_set_hvac_mode_service(hvac_mode)
else:
await self.coordinator.async_request_refresh()
await self.coordinator.async_request_refresh()

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set new target hvac mode."""
if hvac_mode == HVACMode.HEAT:
await self.coordinator.mill_data_connection.set_operation_mode_control_individually()
await self.coordinator.async_request_refresh()
elif hvac_mode == HVACMode.OFF:
await self.coordinator.mill_data_connection.set_operation_mode_off()
await self.coordinator.async_request_refresh()
elif hvac_mode == HVACMode.AUTO:
await self.coordinator.mill_data_connection.set_operation_mode_weekly_program()
await self.coordinator.async_request_refresh()
await self.coordinator.async_request_refresh()

@callback
def _handle_coordinator_update(self) -> None:

@@ -6,7 +6,6 @@ import asyncio
from functools import partial
from http import HTTPStatus
import logging
from typing import Any

import aiohttp

@@ -48,7 +47,7 @@ from .util import supports_push
_LOGGER = logging.getLogger(__name__)


def push_registrations(hass: HomeAssistant) -> dict[str, str]:
def push_registrations(hass):
"""Return a dictionary of push enabled registrations."""
targets = {}

@@ -91,32 +90,38 @@ async def async_get_service(
discovery_info: DiscoveryInfoType | None = None,
) -> MobileAppNotificationService:
"""Get the mobile_app notification service."""
service = hass.data[DOMAIN][DATA_NOTIFY] = MobileAppNotificationService()
service = hass.data[DOMAIN][DATA_NOTIFY] = MobileAppNotificationService(hass)
return service


class MobileAppNotificationService(BaseNotificationService):
"""Implement the notification service for mobile_app."""

def __init__(self, hass):
"""Initialize the service."""
self._hass = hass

@property
def targets(self) -> dict[str, str]:
def targets(self):
"""Return a dictionary of registered targets."""
return push_registrations(self.hass)

async def async_send_message(self, message: str = "", **kwargs: Any) -> None:
async def async_send_message(self, message="", **kwargs):
"""Send a message to the Lambda APNS gateway."""
data = {ATTR_MESSAGE: message}

# Remove default title from notifications.
if (
title_arg := kwargs.get(ATTR_TITLE)
) is not None and title_arg != ATTR_TITLE_DEFAULT:
data[ATTR_TITLE] = title_arg
kwargs.get(ATTR_TITLE) is not None
and kwargs.get(ATTR_TITLE) != ATTR_TITLE_DEFAULT
):
data[ATTR_TITLE] = kwargs.get(ATTR_TITLE)

if not (targets := kwargs.get(ATTR_TARGET)):
targets = push_registrations(self.hass).values()

if (data_arg := kwargs.get(ATTR_DATA)) is not None:
data[ATTR_DATA] = data_arg
if kwargs.get(ATTR_DATA) is not None:
data[ATTR_DATA] = kwargs.get(ATTR_DATA)

local_push_channels = self.hass.data[DOMAIN][DATA_PUSH_CHANNEL]

@@ -161,7 +166,7 @@ class MobileAppNotificationService(BaseNotificationService):

try:
async with asyncio.timeout(10):
response = await async_get_clientsession(self.hass).post(
response = await async_get_clientsession(self._hass).post(
push_url, json=target_data
)
result = await response.json()

@@ -73,6 +73,7 @@ ABBREVIATIONS = {
"fan_mode_stat_t": "fan_mode_state_topic",
"frc_upd": "force_update",
"g_tpl": "green_template",
"grp": "group",
"hs_cmd_t": "hs_command_topic",
"hs_cmd_tpl": "hs_command_template",
"hs_stat_t": "hs_state_topic",

@@ -10,6 +10,7 @@ from homeassistant.helpers import config_validation as cv
from .const import (
CONF_COMMAND_TOPIC,
CONF_ENCODING,
CONF_GROUP,
CONF_QOS,
CONF_RETAIN,
CONF_STATE_TOPIC,
@@ -23,6 +24,7 @@ from .util import valid_publish_topic, valid_qos_schema, valid_subscribe_topic
SCHEMA_BASE = {
vol.Optional(CONF_QOS, default=DEFAULT_QOS): valid_qos_schema,
vol.Optional(CONF_ENCODING, default=DEFAULT_ENCODING): cv.string,
vol.Optional(CONF_GROUP): vol.All(cv.ensure_list, [cv.string]),
}

MQTT_BASE_SCHEMA = vol.Schema(SCHEMA_BASE)
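
Because the new CONF_GROUP option is validated with `vol.All(cv.ensure_list, [cv.string])`, a payload may supply either a single ID or a list; both normalize to a list of strings. A hedged sketch of that coercion (standalone voluptuous, with a local stand-in for `cv.ensure_list` so it runs outside Home Assistant; the "group" values are placeholders):

```python
from typing import Any

import voluptuous as vol


def ensure_list(value: Any) -> list:
    """Local stand-in for homeassistant.helpers.config_validation.ensure_list."""
    if value is None:
        return []
    return value if isinstance(value, list) else [value]


# Mirrors the schema entry added above.
GROUP_SCHEMA = vol.Schema({vol.Optional("group"): vol.All(ensure_list, [str])})

assert GROUP_SCHEMA({"group": "living_room_lights"}) == {"group": ["living_room_lights"]}
assert GROUP_SCHEMA({"group": ["light_a", "light_b"]}) == {"group": ["light_a", "light_b"]}
```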

@@ -110,6 +110,7 @@ CONF_FLASH_TIME_SHORT = "flash_time_short"
CONF_GET_POSITION_TEMPLATE = "position_template"
CONF_GET_POSITION_TOPIC = "position_topic"
CONF_GREEN_TEMPLATE = "green_template"
CONF_GROUP = "group"
CONF_HS_COMMAND_TEMPLATE = "hs_command_template"
CONF_HS_COMMAND_TOPIC = "hs_command_topic"
CONF_HS_STATE_TOPIC = "hs_state_topic"

@@ -79,6 +79,7 @@ from .const import (
CONF_ENABLED_BY_DEFAULT,
CONF_ENCODING,
CONF_ENTITY_PICTURE,
CONF_GROUP,
CONF_HW_VERSION,
CONF_IDENTIFIERS,
CONF_JSON_ATTRS_TEMPLATE,
@@ -136,6 +137,7 @@ MQTT_ATTRIBUTES_BLOCKED = {
"device_class",
"device_info",
"entity_category",
"entity_id",
"entity_picture",
"entity_registry_enabled_default",
"extra_state_attributes",
@@ -475,6 +477,8 @@ class MqttAttributesMixin(Entity):
def __init__(self, config: ConfigType) -> None:
"""Initialize the JSON attributes mixin."""
self._attributes_sub_state: dict[str, EntitySubscription] = {}
if CONF_GROUP in config:
self._attr_included_unique_ids = config[CONF_GROUP]
self._attributes_config = config

async def async_added_to_hass(self) -> None:
@@ -546,7 +550,7 @@ class MqttAttributesMixin(Entity):
_LOGGER.warning("Erroneous JSON: %s", payload)
else:
if isinstance(json_dict, dict):
filtered_dict = {
filtered_dict: dict[str, Any] = {
k: v
for k, v in json_dict.items()
if k not in MQTT_ATTRIBUTES_BLOCKED

@@ -3,7 +3,6 @@
from __future__ import annotations

import logging
from typing import Any

from mycroftapi import MycroftAPI

@@ -11,8 +10,6 @@ from homeassistant.components.notify import BaseNotificationService
from homeassistant.core import HomeAssistant
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType

from . import DOMAIN

_LOGGER = logging.getLogger(__name__)


@@ -22,17 +19,17 @@ def get_service(
discovery_info: DiscoveryInfoType | None = None,
) -> MycroftNotificationService:
"""Get the Mycroft notification service."""
return MycroftNotificationService(hass.data[DOMAIN])
return MycroftNotificationService(hass.data["mycroft"])


class MycroftNotificationService(BaseNotificationService):
"""The Mycroft Notification Service."""

def __init__(self, mycroft_ip: str) -> None:
def __init__(self, mycroft_ip):
"""Initialize the service."""
self.mycroft_ip = mycroft_ip

def send_message(self, message: str = "", **kwargs: Any) -> None:
def send_message(self, message="", **kwargs):
"""Send a message mycroft to speak on instance."""

text = message
@@ -40,4 +37,4 @@ class MycroftNotificationService(BaseNotificationService):
if mycroft is not None:
mycroft.speak_text(text)
else:
_LOGGER.warning("Could not reach this instance of mycroft")

@@ -1,20 +1,25 @@
"""Support for namecheap DNS services."""

from datetime import timedelta
import logging

import defusedxml.ElementTree as ET
import voluptuous as vol

from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import CONF_DOMAIN, CONF_HOST, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.typing import ConfigType

from .const import DOMAIN
from .coordinator import NamecheapConfigEntry, NamecheapDnsUpdateCoordinator

_LOGGER = logging.getLogger(__name__)

DOMAIN = "namecheapdns"

INTERVAL = timedelta(minutes=5)

UPDATE_URL = "https://dynamicdns.park-your-domain.com/update"

CONFIG_SCHEMA = vol.Schema(
{
@@ -32,30 +37,37 @@ CONFIG_SCHEMA = vol.Schema(

async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool:
"""Initialize the namecheap DNS component."""
host = config[DOMAIN][CONF_HOST]
domain = config[DOMAIN][CONF_DOMAIN]
password = config[DOMAIN][CONF_PASSWORD]

if DOMAIN in config:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN]
)
)
session = async_get_clientsession(hass)

result = await _update_namecheapdns(session, host, domain, password)

if not result:
return False

async def update_domain_interval(now):
"""Update the namecheap DNS entry."""
await _update_namecheapdns(session, host, domain, password)

async_track_time_interval(hass, update_domain_interval, INTERVAL)

return result


async def _update_namecheapdns(session, host, domain, password):
"""Update namecheap DNS entry."""
params = {"host": host, "domain": domain, "password": password}

resp = await session.get(UPDATE_URL, params=params)
xml_string = await resp.text()
root = ET.fromstring(xml_string)
err_count = root.find("ErrCount").text

if int(err_count) != 0:
_LOGGER.warning("Updating namecheap domain failed: %s", domain)
return False

return True


async def async_setup_entry(hass: HomeAssistant, entry: NamecheapConfigEntry) -> bool:
"""Set up Namecheap DynamicDNS from a config entry."""

coordinator = NamecheapDnsUpdateCoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data = coordinator

# Add a dummy listener as we do not have regular entities
entry.async_on_unload(coordinator.async_add_listener(lambda: None))

return True


async def async_unload_entry(hass: HomeAssistant, entry: NamecheapConfigEntry) -> bool:
"""Unload a config entry."""
return True

@@ -1,139 +0,0 @@
"""Config flow for the Namecheap DynamicDNS integration."""

from __future__ import annotations

import logging
from typing import Any

from aiohttp import ClientError
import voluptuous as vol

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_DOMAIN, CONF_HOST, CONF_NAME, CONF_PASSWORD
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
TextSelector,
TextSelectorConfig,
TextSelectorType,
)

from .const import DOMAIN
from .helpers import update_namecheapdns
from .issue import deprecate_yaml_issue

_LOGGER = logging.getLogger(__name__)


STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default="@"): cv.string,
vol.Required(CONF_DOMAIN): cv.string,
vol.Required(CONF_PASSWORD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD, autocomplete="current-password"
)
),
}
)

STEP_RECONFIGURE_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_PASSWORD): TextSelector(
TextSelectorConfig(
type=TextSelectorType.PASSWORD, autocomplete="current-password"
)
),
}
)


class NamecheapDnsConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Namecheap DynamicDNS."""

async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match(
{CONF_HOST: user_input[CONF_HOST], CONF_DOMAIN: user_input[CONF_DOMAIN]}
)
session = async_get_clientsession(self.hass)
try:
if not await update_namecheapdns(session, **user_input):
errors["base"] = "update_failed"
except ClientError:
_LOGGER.debug("Cannot connect", exc_info=True)
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"

if not errors:
return self.async_create_entry(
title=f"{user_input[CONF_HOST]}.{user_input[CONF_DOMAIN]}",
data=user_input,
)

return self.async_show_form(
step_id="user",
data_schema=self.add_suggested_values_to_schema(
data_schema=STEP_USER_DATA_SCHEMA, suggested_values=user_input
),
errors=errors,
description_placeholders={"account_panel": "https://ap.www.namecheap.com/"},
)

async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult:
"""Import config from yaml."""

self._async_abort_entries_match(
{CONF_HOST: import_info[CONF_HOST], CONF_DOMAIN: import_info[CONF_DOMAIN]}
)
result = await self.async_step_user(import_info)
if errors := result.get("errors"):
deprecate_yaml_issue(self.hass, import_success=False)
return self.async_abort(reason=errors["base"])

deprecate_yaml_issue(self.hass, import_success=True)
return result

async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfigure flow."""
errors: dict[str, str] = {}

entry = self._get_reconfigure_entry()

if user_input is not None:
session = async_get_clientsession(self.hass)
try:
if not await update_namecheapdns(
session,
entry.data[CONF_HOST],
entry.data[CONF_DOMAIN],
user_input[CONF_PASSWORD],
):
errors["base"] = "update_failed"
except ClientError:
_LOGGER.debug("Cannot connect", exc_info=True)
errors["base"] = "cannot_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"

if not errors:
return self.async_update_reload_and_abort(
entry,
data_updates=user_input,
)

return self.async_show_form(
step_id="reconfigure",
data_schema=STEP_RECONFIGURE_DATA_SCHEMA,
errors=errors,
description_placeholders={CONF_NAME: entry.title},
)
@@ -1,6 +0,0 @@
"""Constants for the Namecheap DynamicDNS integration."""

DOMAIN = "namecheapdns"


UPDATE_URL = "https://dynamicdns.park-your-domain.com/update"
@@ -1,61 +0,0 @@
"""Coordinator for the Namecheap DynamicDNS integration."""

from datetime import timedelta
import logging

from aiohttp import ClientError

from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_DOMAIN, CONF_HOST, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

from .const import DOMAIN
from .helpers import update_namecheapdns

_LOGGER = logging.getLogger(__name__)


type NamecheapConfigEntry = ConfigEntry[NamecheapDnsUpdateCoordinator]


INTERVAL = timedelta(minutes=5)


class NamecheapDnsUpdateCoordinator(DataUpdateCoordinator[None]):
"""Namecheap DynamicDNS update coordinator."""

config_entry: NamecheapConfigEntry

def __init__(self, hass: HomeAssistant, config_entry: NamecheapConfigEntry) -> None:
"""Initialize the Namecheap DynamicDNS update coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=INTERVAL,
)

self.session = async_get_clientsession(hass)

async def _async_update_data(self) -> None:
"""Update Namecheap DNS."""
host = self.config_entry.data[CONF_HOST]
domain = self.config_entry.data[CONF_DOMAIN]
password = self.config_entry.data[CONF_PASSWORD]

try:
if not await update_namecheapdns(self.session, host, domain, password):
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="update_failed",
translation_placeholders={CONF_DOMAIN: f"{host}.{domain}"},
)
except ClientError as e:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="connection_error",
translation_placeholders={CONF_DOMAIN: f"{host}.{domain}"},
) from e
@@ -1,24 +0,0 @@
"""Helpers for the Namecheap DynamicDNS integration."""

import logging

from aiohttp import ClientSession

from .const import UPDATE_URL

_LOGGER = logging.getLogger(__name__)


async def update_namecheapdns(
session: ClientSession, host: str, domain: str, password: str
):
"""Update namecheap DNS entry."""
params = {"host": host, "domain": domain, "password": password}

resp = await session.get(UPDATE_URL, params=params)
xml_string = await resp.text()

if "<ErrCount>0</ErrCount>" not in xml_string:
return False

return True
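
The helper above treats the raw response body as a string and just looks for a zero error count; the update endpoint replies with a small XML document whose `<ErrCount>` element signals success. A hedged standalone usage sketch (the host, domain, and password values are placeholders, not real credentials):

```python
import asyncio

import aiohttp

UPDATE_URL = "https://dynamicdns.park-your-domain.com/update"


async def main() -> None:
    async with aiohttp.ClientSession() as session:
        resp = await session.get(
            UPDATE_URL,
            # Placeholder credentials; real values come from the config entry.
            params={"host": "@", "domain": "example.com", "password": "secret"},
        )
        xml_string = await resp.text()
        # Success is signalled by <ErrCount>0</ErrCount> in the response body.
        print("updated" if "<ErrCount>0</ErrCount>" in xml_string else "failed")


asyncio.run(main())
```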
@@ -1,40 +0,0 @@
"""Issues for Namecheap DynamicDNS integration."""

from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback
from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue

from .const import DOMAIN


@callback
def deprecate_yaml_issue(hass: HomeAssistant, *, import_success: bool) -> None:
"""Deprecate yaml issue."""
if import_success:
async_create_issue(
hass,
HOMEASSISTANT_DOMAIN,
f"deprecated_yaml_{DOMAIN}",
is_fixable=False,
issue_domain=DOMAIN,
breaks_in_ha_version="2026.8.0",
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml",
translation_placeholders={
"domain": DOMAIN,
"integration_title": "Namecheap DynamicDNS",
},
)
else:
async_create_issue(
hass,
DOMAIN,
"deprecated_yaml_import_issue_error",
breaks_in_ha_version="2026.8.0",
is_fixable=False,
issue_domain=DOMAIN,
severity=IssueSeverity.WARNING,
translation_key="deprecated_yaml_import_issue_error",
translation_placeholders={
"url": f"/config/integrations/dashboard/add?domain={DOMAIN}"
},
)
@@ -1,10 +1,9 @@
{
"domain": "namecheapdns",
"name": "Namecheap DynamicDNS",
"codeowners": ["@tr4nt0r"],
"config_flow": true,
"codeowners": [],
"documentation": "https://www.home-assistant.io/integrations/namecheapdns",
"integration_type": "service",
"iot_class": "cloud_push",
"requirements": []
"quality_scale": "legacy",
"requirements": ["defusedxml==0.7.1"]
}

@@ -1,51 +0,0 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]"
},
"error": {
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"unknown": "[%key:common::config_flow::error::unknown%]",
"update_failed": "Updating DNS failed"
},
"step": {
"reconfigure": {
"data": {
"password": "[%key:component::namecheapdns::config::step::user::data::password%]"
},
"data_description": {
"password": "[%key:component::namecheapdns::config::step::user::data_description::password%]"
},
"title": "Re-configure {name}"
},
"user": {
"data": {
"domain": "[%key:common::config_flow::data::username%]",
"host": "[%key:common::config_flow::data::host%]",
"password": "Dynamic DNS password"
},
"data_description": {
"domain": "The domain to update ('example.com')",
"host": "The host to update ('home' for home.example.com). Use '@' to update the root domain",
"password": "Dynamic DNS password for the domain"
},
"description": "Enter your Namecheap DynamicDNS domain and password below to configure dynamic DNS updates. You can find the Dynamic DNS password in your [Namecheap account]({account_panel}) under Domain List > Manage > Advanced DNS > Dynamic DNS."
}
}
},
"exceptions": {
"connection_error": {
"message": "Updating Namecheap DynamicDNS domain {domain} failed due to a connection error"
},
"update_failed": {
"message": "Updating Namecheap DynamicDNS domain {domain} failed"
}
},
"issues": {
"deprecated_yaml_import_issue_error": {
"description": "Configuring Namecheap DynamicDNS using YAML is being removed but there was an error when trying to import the YAML configuration.\n\nEnsure the YAML configuration is correct and restart Home Assistant to try again or remove the Namecheap DynamicDNS YAML configuration from your `configuration.yaml` file and continue to [set up the integration]({url}) manually.",
"title": "The Namecheap DynamicDNS YAML configuration import failed"
}
}
}
@@ -21,7 +21,6 @@ from .nasweb_data import NASwebData

PLATFORMS: list[Platform] = [
Platform.ALARM_CONTROL_PANEL,
Platform.CLIMATE,
Platform.SENSOR,
Platform.SWITCH,
]

@@ -1,168 +0,0 @@
"""Platform for NASweb thermostat."""

from __future__ import annotations

import time
from typing import Any

from webio_api import Thermostat as NASwebThermostat
from webio_api.const import KEY_THERMOSTAT

from homeassistant.components.climate import (
ClimateEntity,
ClimateEntityFeature,
HVACAction,
HVACMode,
UnitOfTemperature,
)
from homeassistant.components.sensor import SensorDeviceClass
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import DiscoveryInfoType
from homeassistant.helpers.update_coordinator import (
BaseCoordinatorEntity,
BaseDataUpdateCoordinatorProtocol,
)

from . import NASwebConfigEntry
from .const import DOMAIN, STATUS_UPDATE_MAX_TIME_INTERVAL

CLIMATE_TRANSLATION_KEY = "thermostat"


async def async_setup_entry(
hass: HomeAssistant,
config: NASwebConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up Climate platform."""
coordinator = config.runtime_data
nasweb_thermostat: NASwebThermostat = coordinator.data[KEY_THERMOSTAT]
climate = Thermostat(coordinator, nasweb_thermostat)
async_add_entities([climate])


class Thermostat(ClimateEntity, BaseCoordinatorEntity):
"""Entity representing NASweb thermostat."""

_attr_device_class = SensorDeviceClass.TEMPERATURE
_attr_has_entity_name = True
_attr_hvac_modes = [
HVACMode.OFF,
HVACMode.HEAT,
HVACMode.COOL,
HVACMode.HEAT_COOL,
HVACMode.FAN_ONLY,
]
_attr_max_temp = 50
_attr_min_temp = -50
_attr_precision = 1.0
_attr_should_poll = False
_attr_supported_features = ClimateEntityFeature(
ClimateEntityFeature.TARGET_TEMPERATURE_RANGE
)
_attr_target_temperature_step = 1.0
_attr_temperature_unit = UnitOfTemperature.CELSIUS
_attr_translation_key = CLIMATE_TRANSLATION_KEY

def __init__(
self,
coordinator: BaseDataUpdateCoordinatorProtocol,
nasweb_thermostat: NASwebThermostat,
) -> None:
"""Initialize Thermostat."""
super().__init__(coordinator)
self._thermostat = nasweb_thermostat
self._attr_available = False
self._attr_name = nasweb_thermostat.name
self._attr_unique_id = f"{DOMAIN}.{self._thermostat.webio_serial}.thermostat"
self._attr_device_info = DeviceInfo(
identifiers={(DOMAIN, self._thermostat.webio_serial)}
)

async def async_added_to_hass(self) -> None:
"""When entity is added to hass."""
await super().async_added_to_hass()
self._handle_coordinator_update()

def _set_attr_available(
self, entity_last_update: float, available: bool | None
) -> None:
if (
self.coordinator.last_update is None
or time.time() - entity_last_update >= STATUS_UPDATE_MAX_TIME_INTERVAL
):
self._attr_available = False
else:
self._attr_available = available if available is not None else False

@callback
def _handle_coordinator_update(self) -> None:
"""Handle updated data from the coordinator."""
self._attr_current_temperature = self._thermostat.current_temp
self._attr_target_temperature_low = self._thermostat.temp_target_min
self._attr_target_temperature_high = self._thermostat.temp_target_max
self._attr_hvac_mode = self._get_current_hvac_mode()
self._attr_hvac_action = self._get_current_action()
self._attr_name = self._thermostat.name if self._thermostat.name else None
self._set_attr_available(
self._thermostat.last_update, self._thermostat.available
)
self.async_write_ha_state()

def _get_current_hvac_mode(self) -> HVACMode:
have_cooling = self._thermostat.enabled_above_output
have_heating = self._thermostat.enabled_below_output
if have_cooling and have_heating:
return HVACMode.HEAT_COOL
if have_cooling:
return HVACMode.COOL
if have_heating:
return HVACMode.HEAT
if self._thermostat.enabled_inrange_output:
return HVACMode.FAN_ONLY
return HVACMode.OFF

def _get_current_action(self) -> HVACAction:
if self._thermostat.current_temp is None:
return HVACAction.OFF
if (
self._thermostat.temp_target_min is not None
and self._thermostat.current_temp < self._thermostat.temp_target_min
and self._thermostat.enabled_below_output
):
return HVACAction.HEATING
if (
self._thermostat.temp_target_max is not None
and self._thermostat.current_temp > self._thermostat.temp_target_max
and self._thermostat.enabled_above_output
):
return HVACAction.COOLING
if (
self._thermostat.temp_target_min is not None
and self._thermostat.temp_target_max is not None
and self._thermostat.current_temp >= self._thermostat.temp_target_min
and self._thermostat.current_temp <= self._thermostat.temp_target_max
and self._thermostat.enabled_inrange_output
):
return HVACAction.FAN
return HVACAction.IDLE

async def async_update(self) -> None:
"""Update the entity.

Only used by the generic entity update service.
Scheduling updates is not necessary, the coordinator takes care of updates via push notifications.
"""

async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None:
"""Set HVACMode for Thermostat."""
await self._thermostat.set_hvac_mode(hvac_mode)

async def async_set_temperature(self, **kwargs: Any) -> None:
"""Set temperature range for Thermostat."""
await self._thermostat.set_temperature(
kwargs["target_temp_low"], kwargs["target_temp_high"]
)
@@ -23,7 +23,6 @@ _LOGGER = logging.getLogger(__name__)

KEY_INPUTS = "inputs"
KEY_OUTPUTS = "outputs"
KEY_THERMOSTAT = "thermostat"
KEY_ZONES = "zones"


@@ -105,7 +104,6 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
KEY_OUTPUTS: self.webio_api.outputs,
KEY_INPUTS: self.webio_api.inputs,
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
KEY_THERMOSTAT: self.webio_api.thermostat,
KEY_ZONES: self.webio_api.zones,
}
self.async_set_updated_data(data)
@@ -201,7 +199,6 @@ class NASwebCoordinator(BaseDataUpdateCoordinatorProtocol):
KEY_OUTPUTS: self.webio_api.outputs,
KEY_INPUTS: self.webio_api.inputs,
KEY_TEMP_SENSOR: self.webio_api.temp_sensor,
KEY_THERMOSTAT: self.webio_api.thermostat,
KEY_ZONES: self.webio_api.zones,
}
self.async_set_updated_data(new_data)

@@ -29,11 +29,6 @@
"name": "Zone {index}"
}
},
"climate": {
"thermostat": {
"name": "[%key:component::climate::entity_component::_::name%]"
}
},
"sensor": {
"sensor_input": {
"name": "Input {index}",

Some files were not shown because too many files have changed in this diff.