Compare commits

...

27 Commits

Author SHA1 Message Date
Daniel Hjelseth Høyer
a9b984d705 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-16 16:47:20 +01:00
Daniel Hjelseth Høyer
886c0578e7 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 16:18:46 +01:00
Daniel Hjelseth Høyer
02e579c5ae tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 15:38:27 +01:00
Daniel Hjelseth Høyer
d47f3ca1d8 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 15:27:07 +01:00
Daniel Hjelseth Høyer
02e5f2c234 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 15:18:07 +01:00
Daniel Hjelseth Høyer
e42195bfed tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 14:39:42 +01:00
Daniel Hjelseth Høyer
b2944a6d66 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 13:55:08 +01:00
Daniel Hjelseth Høyer
03d15fb70c tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 13:46:40 +01:00
Daniel Hjelseth Høyer
01d57ddcf1 tibber refactor
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 13:34:15 +01:00
Daniel Hjelseth Høyer
cfc85cfd29 clean up
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 12:37:13 +01:00
Daniel Hjelseth Høyer
ca2dc20709 Refactor Tibber
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-15 03:22:39 +01:00
Denis Shulyaka
3840f7a767 Bump openai to 2.21.0 (#163032) 2026-02-14 20:08:45 -05:00
Jordan Harvey
af2d2a857a Add bedtime end time entity Nintendo parental controls (#160927) 2026-02-14 22:51:20 +01:00
jameson_uk
31970255a2 Add air quality monitor sensors to Alexa Devices (#162095)
Co-authored-by: Norbert Rittel <norbert@rittel.de>
2026-02-14 22:29:11 +01:00
Daniel Hjelseth Høyer
f30397a11a Update homevolt quality scale (#163022)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
2026-02-14 22:05:03 +01:00
Denis Shulyaka
cbcfc43c5a Add reauthentication to Anthropic (#163019) 2026-02-14 21:53:25 +01:00
mettolen
acaa2aeeee Add switch entities to Liebherr integration (#162688) 2026-02-14 21:41:06 +01:00
Denis Shulyaka
c67c19413b Improve Anthropic coverage (#163011) 2026-02-14 21:33:53 +01:00
Paul Tarjan
8840d2f0ef Add entity descriptions to Hikvision binary sensors (#160875)
Co-authored-by: Claude <noreply@anthropic.com>
2026-02-14 21:32:39 +01:00
Daniel Hjelseth Høyer
82fb3c35dc Add zeroconf support to Homevolt (#162897)
Signed-off-by: Daniel Hjelseth Høyer <github@dahoiv.net>
Co-authored-by: Copilot <175728472+Copilot@users.noreply.github.com>
2026-02-14 21:24:16 +01:00
Franck Nijhof
4d0d5d6817 CI security hardening actions/checkout to not persist-credentials (#162991) 2026-02-14 21:11:43 +01:00
Denis Shulyaka
12584482a2 Add data descriptions for Anthropic data flow (#162961) 2026-02-14 22:34:33 +03:00
Denis Shulyaka
b47dd2f923 Enable strict typing check for Anthropic (#163013) 2026-02-14 19:04:29 +00:00
Ludovic BOUÉ
3d354da104 Added ppm support for the ozone device class in sensor (#162996) 2026-02-14 19:57:16 +01:00
wollew
89e900dca1 add switch platform for Velux on/off switches (#163002) 2026-02-14 15:36:51 +01:00
Patrick Vorgers
675884ad78 S3 backup - Improved buffer handling (#162955) 2026-02-14 15:26:08 +01:00
Franck Nijhof
efb6cdc17e Fix failing sftp_storage test (#163000) 2026-02-14 08:12:06 -06:00
75 changed files with 3444 additions and 532 deletions

View File

@@ -31,6 +31,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -97,6 +99,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Download nightly wheels of frontend
if: needs.init.outputs.channel == 'dev'
@@ -274,6 +278,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set build additional args
run: |
@@ -312,6 +318,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Initialize git
uses: home-assistant/actions/helpers/git-init@master
@@ -478,6 +486,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -523,6 +533,8 @@ jobs:
steps:
- name: Checkout repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Login to GitHub Container Registry
uses: docker/login-action@c94ce9fb468520275223c153574b00df6fe4bcc9 # v3.7.0

View File

@@ -97,6 +97,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Generate partial Python venv restore key
id: generate_python_cache_key
run: |
@@ -247,6 +249,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Register problem matchers
run: |
echo "::add-matcher::.github/workflows/matchers/yamllint.json"
@@ -277,6 +281,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Register hadolint problem matcher
run: |
echo "::add-matcher::.github/workflows/matchers/hadolint.json"
@@ -296,6 +302,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -448,6 +456,8 @@ jobs:
libturbojpeg
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -481,6 +491,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -513,6 +525,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -537,6 +551,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Dependency review
uses: actions/dependency-review-action@3c4e3dcb1aa7874d2c16be7d79418e9b7efd6261 # v4.8.2
with:
@@ -560,6 +576,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -603,6 +621,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -650,6 +670,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -695,6 +717,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -783,6 +807,8 @@ jobs:
libturbojpeg
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -854,6 +880,8 @@ jobs:
libxml2-utils
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -998,6 +1026,8 @@ jobs:
libxml2-utils
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -1151,6 +1181,8 @@ jobs:
postgresql-server-dev-14
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -1263,6 +1295,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Download all coverage artifacts
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
@@ -1320,6 +1354,8 @@ jobs:
libxml2-utils
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ matrix.python-version }}
id: python
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0
@@ -1423,6 +1459,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Download all coverage artifacts
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:

View File

@@ -22,6 +22,8 @@ jobs:
steps:
- name: Check out code from GitHub
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Initialize CodeQL
uses: github/codeql-action/init@45cbd0c69e560cd9e7cd7f8c32362050c9b7ded2 # v4.32.2

View File

@@ -20,6 +20,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6.2.0

View File

@@ -31,6 +31,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
id: python
@@ -116,6 +118,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Download env_file
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
@@ -165,6 +169,8 @@ jobs:
steps:
- name: Checkout the repository
uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2
with:
persist-credentials: false
- name: Download env_file
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0

View File

@@ -84,6 +84,7 @@ homeassistant.components.androidtv_remote.*
homeassistant.components.anel_pwrctrl.*
homeassistant.components.anova.*
homeassistant.components.anthemav.*
homeassistant.components.anthropic.*
homeassistant.components.apache_kafka.*
homeassistant.components.apcupsd.*
homeassistant.components.api.*

View File

@@ -1,4 +1,11 @@
{
"entity": {
"sensor": {
"voc_index": {
"default": "mdi:molecule"
}
}
},
"services": {
"send_info_skill": {
"service": "mdi:information"

View File

@@ -20,7 +20,13 @@ from homeassistant.components.sensor import (
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import LIGHT_LUX, UnitOfTemperature
from homeassistant.const import (
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
CONCENTRATION_PARTS_PER_MILLION,
LIGHT_LUX,
PERCENTAGE,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
@@ -77,6 +83,41 @@ SENSORS: Final = (
native_unit_of_measurement=LIGHT_LUX,
state_class=SensorStateClass.MEASUREMENT,
),
AmazonSensorEntityDescription(
key="Humidity",
device_class=SensorDeviceClass.HUMIDITY,
native_unit_of_measurement=PERCENTAGE,
state_class=SensorStateClass.MEASUREMENT,
),
AmazonSensorEntityDescription(
key="PM10",
device_class=SensorDeviceClass.PM10,
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
),
AmazonSensorEntityDescription(
key="PM25",
device_class=SensorDeviceClass.PM25,
native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
state_class=SensorStateClass.MEASUREMENT,
),
AmazonSensorEntityDescription(
key="CO",
device_class=SensorDeviceClass.CO,
native_unit_of_measurement=CONCENTRATION_PARTS_PER_MILLION,
state_class=SensorStateClass.MEASUREMENT,
),
AmazonSensorEntityDescription(
key="VOC",
# No device class as this is an index not a concentration
state_class=SensorStateClass.MEASUREMENT,
translation_key="voc_index",
),
AmazonSensorEntityDescription(
key="Air Quality",
device_class=SensorDeviceClass.AQI,
state_class=SensorStateClass.MEASUREMENT,
),
)
NOTIFICATIONS: Final = (
AmazonNotificationEntityDescription(

View File

@@ -75,6 +75,9 @@
},
"timer": {
"name": "Next timer"
},
"voc_index": {
"name": "Volatile organic compounds index"
}
},
"switch": {

View File

@@ -7,7 +7,7 @@ import anthropic
from homeassistant.config_entries import ConfigEntry, ConfigSubentry
from homeassistant.const import CONF_API_KEY, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers import (
config_validation as cv,
device_registry as dr,
@@ -47,8 +47,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
try:
await client.models.list(timeout=10.0)
except anthropic.AuthenticationError as err:
LOGGER.error("Invalid API key: %s", err)
return False
raise ConfigEntryAuthFailed(err) from err
except anthropic.AnthropicError as err:
raise ConfigEntryNotReady(err) from err
@@ -77,7 +76,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) ->
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
async def async_unload_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool:
"""Unload Anthropic."""
return await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
@@ -105,7 +104,7 @@ async def async_migrate_integration(hass: HomeAssistant) -> None:
if not any(entry.version == 1 for entry in entries):
return
api_keys_entries: dict[str, tuple[ConfigEntry, bool]] = {}
api_keys_entries: dict[str, tuple[AnthropicConfigEntry, bool]] = {}
entity_registry = er.async_get(hass)
device_registry = dr.async_get(hass)

View File

@@ -4,9 +4,9 @@ from __future__ import annotations
from json import JSONDecodeError
import logging
from typing import TYPE_CHECKING
from homeassistant.components import ai_task, conversation
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
@@ -14,12 +14,15 @@ from homeassistant.util.json import json_loads
from .entity import AnthropicBaseLLMEntity
if TYPE_CHECKING:
from . import AnthropicConfigEntry
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
hass: HomeAssistant,
config_entry: ConfigEntry,
config_entry: AnthropicConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up AI Task entities."""

View File

@@ -2,10 +2,11 @@
from __future__ import annotations
from collections.abc import Mapping
import json
import logging
import re
from typing import Any, cast
from typing import TYPE_CHECKING, Any, cast
import anthropic
import voluptuous as vol
@@ -13,7 +14,7 @@ from voluptuous_openapi import convert
from homeassistant.components.zone import ENTITY_ID_HOME
from homeassistant.config_entries import (
ConfigEntry,
SOURCE_REAUTH,
ConfigEntryState,
ConfigFlow,
ConfigFlowResult,
@@ -65,6 +66,9 @@ from .const import (
WEB_SEARCH_UNSUPPORTED_MODELS,
)
if TYPE_CHECKING:
from . import AnthropicConfigEntry
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
@@ -162,6 +166,10 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
if self.source == SOURCE_REAUTH:
return self.async_update_reload_and_abort(
self._get_reauth_entry(), data_updates=user_input
)
return self.async_create_entry(
title="Claude",
data=user_input,
@@ -182,13 +190,34 @@ class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN):
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors or None
step_id="user",
data_schema=STEP_USER_DATA_SCHEMA,
errors=errors or None,
description_placeholders={
"instructions_url": "https://www.home-assistant.io/integrations/anthropic/#generating-an-api-key",
},
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth upon an API authentication error."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Dialog that informs the user that reauth is required."""
if not user_input:
return self.async_show_form(
step_id="reauth_confirm", data_schema=STEP_USER_DATA_SCHEMA
)
return await self.async_step_user(user_input)
@classmethod
@callback
def async_get_supported_subentry_types(
cls, config_entry: ConfigEntry
cls, config_entry: AnthropicConfigEntry
) -> dict[str, type[ConfigSubentryFlow]]:
"""Return subentries supported by this integration."""
return {

View File

@@ -3,13 +3,13 @@
from __future__ import annotations
from collections.abc import Iterator
from typing import cast
from typing import TYPE_CHECKING, cast
import voluptuous as vol
from homeassistant import data_entry_flow
from homeassistant.components.repairs import RepairsFlow
from homeassistant.config_entries import ConfigEntry, ConfigEntryState, ConfigSubentry
from homeassistant.config_entries import ConfigEntryState, ConfigSubentry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.selector import SelectSelector, SelectSelectorConfig
@@ -23,6 +23,9 @@ from .const import (
DOMAIN,
)
if TYPE_CHECKING:
from . import AnthropicConfigEntry
class ModelDeprecatedRepairFlow(RepairsFlow):
"""Handler for an issue fixing flow."""
@@ -110,7 +113,7 @@ class ModelDeprecatedRepairFlow(RepairsFlow):
async def _async_next_target(
self,
) -> tuple[ConfigEntry, ConfigSubentry, str] | None:
) -> tuple[AnthropicConfigEntry, ConfigSubentry, str] | None:
"""Return the next deprecated subentry target."""
if self._subentry_iter is None:
self._subentry_iter = self._iter_deprecated_subentries()

View File

@@ -1,7 +1,8 @@
{
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]"
"already_configured": "[%key:common::config_flow::abort::already_configured_service%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
},
"error": {
"authentication_error": "[%key:common::config_flow::error::invalid_auth%]",
@@ -10,10 +11,23 @@
"unknown": "[%key:common::config_flow::error::unknown%]"
},
"step": {
"reauth_confirm": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
},
"data_description": {
"api_key": "[%key:component::anthropic::config::step::user::data_description::api_key%]"
},
"description": "Reauthentication required. Please enter your updated API key."
},
"user": {
"data": {
"api_key": "[%key:common::config_flow::data::api_key%]"
}
},
"data_description": {
"api_key": "Your Anthropic API key."
},
"description": "Set up Anthropic integration by providing your Anthropic API key. Instructions to obtain an API key can be found in [the documentation]({instructions_url})."
}
}
},
@@ -35,6 +49,11 @@
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::max_tokens%]",
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data::temperature%]"
},
"data_description": {
"chat_model": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::chat_model%]",
"max_tokens": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::max_tokens%]",
"temperature": "[%key:component::anthropic::config_subentries::conversation::step::advanced::data_description::temperature%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::advanced::title%]"
},
"init": {
@@ -42,6 +61,10 @@
"name": "[%key:common::config_flow::data::name%]",
"recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data::recommended%]"
},
"data_description": {
"name": "[%key:component::anthropic::config_subentries::conversation::step::init::data_description::name%]",
"recommended": "[%key:component::anthropic::config_subentries::conversation::step::init::data_description::recommended%]"
},
"title": "[%key:component::anthropic::config_subentries::conversation::step::init::title%]"
},
"model": {
@@ -80,6 +103,11 @@
"max_tokens": "Maximum tokens to return in response",
"temperature": "Temperature"
},
"data_description": {
"chat_model": "The model to serve the responses.",
"max_tokens": "Limit the number of response tokens.",
"temperature": "Control the randomness of the response, trading off between creativity and coherence."
},
"title": "Advanced settings"
},
"init": {
@@ -90,7 +118,10 @@
"recommended": "Recommended model settings"
},
"data_description": {
"prompt": "Instruct how the LLM should respond. This can be a template."
"llm_hass_api": "Allow the LLM to control Home Assistant.",
"name": "The name of this configuration",
"prompt": "Instruct how the LLM should respond. This can be a template.",
"recommended": "Use default configuration"
},
"title": "Basic settings"
},
@@ -122,6 +153,9 @@
"data": {
"chat_model": "[%key:common::generic::model%]"
},
"data_description": {
"chat_model": "Select the new model to use."
},
"description": "You are updating {subentry_name} ({subentry_type}) in {entry_name}. The current model {model} is deprecated. Select a supported model to continue.",
"title": "Update model"
}

View File

@@ -5,7 +5,7 @@ import functools
import json
import logging
from time import time
from typing import Any
from typing import Any, cast
from botocore.exceptions import BotoCoreError
@@ -189,48 +189,68 @@ class S3BackupAgent(BackupAgent):
)
upload_id = multipart_upload["UploadId"]
try:
parts = []
parts: list[dict[str, Any]] = []
part_number = 1
buffer_size = 0 # bytes
buffer: list[bytes] = []
buffer = bytearray() # bytes buffer to store the data
offset = 0 # start index of unread data inside buffer
stream = await open_stream()
async for chunk in stream:
buffer_size += len(chunk)
buffer.append(chunk)
buffer.extend(chunk)
# If buffer size meets minimum part size, upload it as a part
if buffer_size >= MULTIPART_MIN_PART_SIZE_BYTES:
_LOGGER.debug(
"Uploading part number %d, size %d", part_number, buffer_size
)
part = await self._client.upload_part(
Bucket=self._bucket,
Key=tar_filename,
PartNumber=part_number,
UploadId=upload_id,
Body=b"".join(buffer),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
part_number += 1
buffer_size = 0
buffer = []
# Upload parts of exactly MULTIPART_MIN_PART_SIZE_BYTES to ensure
# all non-trailing parts have the same size (defensive implementation)
view = memoryview(buffer)
try:
while len(buffer) - offset >= MULTIPART_MIN_PART_SIZE_BYTES:
start = offset
end = offset + MULTIPART_MIN_PART_SIZE_BYTES
part_data = view[start:end]
offset = end
_LOGGER.debug(
"Uploading part number %d, size %d",
part_number,
len(part_data),
)
part = await cast(Any, self._client).upload_part(
Bucket=self._bucket,
Key=tar_filename,
PartNumber=part_number,
UploadId=upload_id,
Body=part_data.tobytes(),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
part_number += 1
finally:
view.release()
# Compact the buffer if the consumed offset has grown large enough. This
# avoids unnecessary memory copies when compacting after every part upload.
if offset and offset >= MULTIPART_MIN_PART_SIZE_BYTES:
buffer = bytearray(buffer[offset:])
offset = 0
# Upload the final buffer as the last part (no minimum size requirement)
if buffer:
# Offset should be 0 after the last compaction, but we use it as the start
# index to be defensive in case the buffer was not compacted.
if offset < len(buffer):
remaining_data = memoryview(buffer)[offset:]
_LOGGER.debug(
"Uploading final part number %d, size %d", part_number, buffer_size
"Uploading final part number %d, size %d",
part_number,
len(remaining_data),
)
part = await self._client.upload_part(
part = await cast(Any, self._client).upload_part(
Bucket=self._bucket,
Key=tar_filename,
PartNumber=part_number,
UploadId=upload_id,
Body=b"".join(buffer),
Body=remaining_data.tobytes(),
)
parts.append({"PartNumber": part_number, "ETag": part["ETag"]})
await self._client.complete_multipart_upload(
await cast(Any, self._client).complete_multipart_upload(
Bucket=self._bucket,
Key=tar_filename,
UploadId=upload_id,

View File

@@ -13,6 +13,6 @@
"integration_type": "system",
"iot_class": "cloud_push",
"loggers": ["acme", "hass_nabucasa", "snitun"],
"requirements": ["hass-nabucasa==1.13.0", "openai==2.15.0"],
"requirements": ["hass-nabucasa==1.13.0", "openai==2.21.0"],
"single_config_entry": true
}

View File

@@ -11,6 +11,7 @@ from homeassistant.components.binary_sensor import (
PLATFORM_SCHEMA as BINARY_SENSOR_PLATFORM_SCHEMA,
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.config_entries import SOURCE_IMPORT
from homeassistant.const import (
@@ -23,6 +24,7 @@ from homeassistant.const import (
CONF_PORT,
CONF_SSL,
CONF_USERNAME,
EntityCategory,
)
from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
@@ -42,29 +44,118 @@ CONF_IGNORED = "ignored"
DEFAULT_DELAY = 0
DEFAULT_IGNORED = False
# Device class mapping for Hikvision event types
DEVICE_CLASS_MAP: dict[str, BinarySensorDeviceClass | None] = {
"Motion": BinarySensorDeviceClass.MOTION,
"Line Crossing": BinarySensorDeviceClass.MOTION,
"Field Detection": BinarySensorDeviceClass.MOTION,
"Tamper Detection": BinarySensorDeviceClass.MOTION,
"Shelter Alarm": None,
"Disk Full": None,
"Disk Error": None,
"Net Interface Broken": BinarySensorDeviceClass.CONNECTIVITY,
"IP Conflict": BinarySensorDeviceClass.CONNECTIVITY,
"Illegal Access": None,
"Video Mismatch": None,
"Bad Video": None,
"PIR Alarm": BinarySensorDeviceClass.MOTION,
"Face Detection": BinarySensorDeviceClass.MOTION,
"Scene Change Detection": BinarySensorDeviceClass.MOTION,
"I/O": None,
"Unattended Baggage": BinarySensorDeviceClass.MOTION,
"Attended Baggage": BinarySensorDeviceClass.MOTION,
"Recording Failure": None,
"Exiting Region": BinarySensorDeviceClass.MOTION,
"Entering Region": BinarySensorDeviceClass.MOTION,
# Entity descriptions for known Hikvision event types
# The key matches the sensor_type from pyhik (the friendly name from SENSOR_MAP)
BINARY_SENSOR_DESCRIPTIONS: dict[str, BinarySensorEntityDescription] = {
"Motion": BinarySensorEntityDescription(
key="motion",
device_class=BinarySensorDeviceClass.MOTION,
),
"Line Crossing": BinarySensorEntityDescription(
key="line_crossing",
translation_key="line_crossing",
device_class=BinarySensorDeviceClass.MOTION,
),
"Field Detection": BinarySensorEntityDescription(
key="field_detection",
translation_key="field_detection",
device_class=BinarySensorDeviceClass.MOTION,
),
"Tamper Detection": BinarySensorEntityDescription(
key="tamper_detection",
device_class=BinarySensorDeviceClass.TAMPER,
),
"Shelter Alarm": BinarySensorEntityDescription(
key="shelter_alarm",
translation_key="shelter_alarm",
),
"Disk Full": BinarySensorEntityDescription(
key="disk_full",
translation_key="disk_full",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"Disk Error": BinarySensorEntityDescription(
key="disk_error",
translation_key="disk_error",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"Net Interface Broken": BinarySensorEntityDescription(
key="net_interface_broken",
translation_key="net_interface_broken",
device_class=BinarySensorDeviceClass.CONNECTIVITY,
entity_category=EntityCategory.DIAGNOSTIC,
),
"IP Conflict": BinarySensorEntityDescription(
key="ip_conflict",
translation_key="ip_conflict",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"Illegal Access": BinarySensorEntityDescription(
key="illegal_access",
translation_key="illegal_access",
device_class=BinarySensorDeviceClass.SAFETY,
),
"Video Mismatch": BinarySensorEntityDescription(
key="video_mismatch",
translation_key="video_mismatch",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"Bad Video": BinarySensorEntityDescription(
key="bad_video",
translation_key="bad_video",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"PIR Alarm": BinarySensorEntityDescription(
key="pir_alarm",
translation_key="pir_alarm",
device_class=BinarySensorDeviceClass.MOTION,
),
"Face Detection": BinarySensorEntityDescription(
key="face_detection",
translation_key="face_detection",
device_class=BinarySensorDeviceClass.MOTION,
),
"Scene Change Detection": BinarySensorEntityDescription(
key="scene_change_detection",
translation_key="scene_change_detection",
device_class=BinarySensorDeviceClass.MOTION,
),
"I/O": BinarySensorEntityDescription(
key="io",
translation_key="io",
),
"Unattended Baggage": BinarySensorEntityDescription(
key="unattended_baggage",
translation_key="unattended_baggage",
device_class=BinarySensorDeviceClass.MOTION,
),
"Attended Baggage": BinarySensorEntityDescription(
key="attended_baggage",
translation_key="attended_baggage",
device_class=BinarySensorDeviceClass.MOTION,
),
"Recording Failure": BinarySensorEntityDescription(
key="recording_failure",
translation_key="recording_failure",
device_class=BinarySensorDeviceClass.PROBLEM,
entity_category=EntityCategory.DIAGNOSTIC,
),
"Exiting Region": BinarySensorEntityDescription(
key="exiting_region",
translation_key="exiting_region",
device_class=BinarySensorDeviceClass.MOTION,
),
"Entering Region": BinarySensorEntityDescription(
key="entering_region",
translation_key="entering_region",
device_class=BinarySensorDeviceClass.MOTION,
),
}
_LOGGER = logging.getLogger(__name__)
@@ -158,13 +249,24 @@ async def async_setup_entry(
)
return
# Log warnings for unknown sensor types and skip them
for sensor_type in sensors:
if sensor_type not in BINARY_SENSOR_DESCRIPTIONS:
_LOGGER.warning(
"Unknown Hikvision sensor type '%s', please report this at "
"https://github.com/home-assistant/core/issues",
sensor_type,
)
async_add_entities(
HikvisionBinarySensor(
entry=entry,
description=BINARY_SENSOR_DESCRIPTIONS[sensor_type],
sensor_type=sensor_type,
channel=channel_info[1],
)
for sensor_type, channel_list in sensors.items()
if sensor_type in BINARY_SENSOR_DESCRIPTIONS
for channel_info in channel_list
)
@@ -177,20 +279,18 @@ class HikvisionBinarySensor(HikvisionEntity, BinarySensorEntity):
def __init__(
    self,
    entry: HikvisionConfigEntry,
    description: BinarySensorEntityDescription,
    sensor_type: str,
    channel: int,
) -> None:
    """Initialize the binary sensor.

    Args:
        entry: Hikvision config entry this sensor belongs to.
        description: Static entity description (key, translation_key,
            device class, entity category).
        sensor_type: Raw pyhik event-type name (e.g. "PIR Alarm"); used to
            build the unique ID and the pyhik callback ID.
        channel: Camera/NVR channel number the event is reported on.
    """
    super().__init__(entry, channel)
    self.entity_description = description
    self._sensor_type = sensor_type
    # Build unique ID (includes sensor_type for uniqueness per sensor)
    self._attr_unique_id = f"{self._data.device_id}_{sensor_type}_{channel}"
    # Set entity name and device class
    # NOTE(review): these two assignments override the translation_key and
    # device_class already supplied via entity_description above — they look
    # like leftovers from before the entity-description refactor. Confirm
    # DEVICE_CLASS_MAP still exists and whether these lines should be dropped.
    self._attr_name = sensor_type
    self._attr_device_class = DEVICE_CLASS_MAP.get(sensor_type)
    # Callback ID for pyhik
    self._callback_id = f"{self._data.device_id}.{sensor_type}.{channel}"

View File

@@ -34,6 +34,67 @@
"name": "{device_name} channel {channel_number}"
}
},
"entity": {
"binary_sensor": {
"attended_baggage": {
"name": "Attended baggage"
},
"bad_video": {
"name": "Bad video"
},
"disk_error": {
"name": "Disk error"
},
"disk_full": {
"name": "Disk full"
},
"entering_region": {
"name": "Entering region"
},
"exiting_region": {
"name": "Exiting region"
},
"face_detection": {
"name": "Face detection"
},
"field_detection": {
"name": "Field detection"
},
"illegal_access": {
"name": "Illegal access"
},
"io": {
"name": "I/O alarm"
},
"ip_conflict": {
"name": "IP conflict"
},
"line_crossing": {
"name": "Line crossing"
},
"net_interface_broken": {
"name": "Network interface broken"
},
"pir_alarm": {
"name": "PIR alarm"
},
"recording_failure": {
"name": "Recording failure"
},
"scene_change_detection": {
"name": "Scene change detection"
},
"shelter_alarm": {
"name": "Shelter alarm"
},
"unattended_baggage": {
"name": "Unattended baggage"
},
"video_mismatch": {
"name": "Video mismatch"
}
}
},
"issues": {
"deprecated_yaml_import_issue": {
"description": "Configuring {integration_title} using YAML is deprecated and the import failed. Please remove the `{domain}` entry from your `configuration.yaml` file and set up the integration manually.",

View File

@@ -12,6 +12,7 @@ import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from .const import DOMAIN
@@ -39,6 +40,7 @@ class HomevoltConfigFlow(ConfigFlow, domain=DOMAIN):
def __init__(self) -> None:
"""Initialize the config flow."""
self._host: str | None = None
self._need_password: bool = False
async def check_status(self, client: Homevolt) -> dict[str, str]:
"""Check connection status and return errors if any."""
@@ -156,3 +158,68 @@ class HomevoltConfigFlow(ConfigFlow, domain=DOMAIN):
errors=errors,
description_placeholders={"host": self._host},
)
async def async_step_zeroconf(
    self, discovery_info: ZeroconfServiceInfo
) -> ConfigFlowResult:
    """Handle zeroconf discovery.

    Probes the discovered host anonymously; if the device answers with an
    auth error, the confirm step will prompt for a password instead.
    """
    self._host = discovery_info.host
    # Abort early if an entry for this host already exists.
    self._async_abort_entries_match({CONF_HOST: self._host})
    websession = async_get_clientsession(self.hass)
    # Probe without credentials first (password=None).
    client = Homevolt(self._host, None, websession=websession)
    errors = await self.check_status(client)
    if errors.get("base") == "invalid_auth":
        # Device is password protected; defer auth to the confirm step.
        # NOTE(review): in this branch the unique ID cannot be read yet, so
        # duplicate discoveries are only deduplicated by host until the
        # confirm step sets the unique ID — confirm this is acceptable.
        self._need_password = True
    elif errors:
        # Any other connection problem ends the discovery flow.
        return self.async_abort(reason=errors["base"])
    else:
        await self.async_set_unique_id(client.unique_id)
        # Update the stored host for an already-configured device.
        self._abort_if_unique_id_configured(
            updates={CONF_HOST: self._host},
        )
    return await self.async_step_zeroconf_confirm()
async def async_step_zeroconf_confirm(
    self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
    """Confirm zeroconf discovery.

    Shows either a plain confirmation or a password form (when the probe in
    the discovery step hit an auth error), re-validates the connection, and
    creates the config entry.
    """
    # Discovery step always sets the host before reaching this step.
    assert self._host is not None
    errors: dict[str, str] = {}
    if user_input is None:
        if self._need_password:
            # Device requires credentials: ask for the password.
            return self.async_show_form(
                step_id="zeroconf_confirm",
                data_schema=STEP_CREDENTIALS_DATA_SCHEMA,
                errors=errors,
                description_placeholders={"host": self._host},
            )
        # No credentials needed: a simple yes/no confirmation suffices.
        self._set_confirm_only()
        return self.async_show_form(
            step_id="zeroconf_confirm",
            description_placeholders={"host": self._host},
        )
    password: str | None = None
    if self._need_password:
        password = user_input[CONF_PASSWORD]
    websession = async_get_clientsession(self.hass)
    # Re-validate with the (possibly None) password before creating the entry.
    client = Homevolt(self._host, password, websession=websession)
    errors = await self.check_status(client)
    if errors:
        return self.async_show_form(
            step_id="zeroconf_confirm",
            data_schema=STEP_CREDENTIALS_DATA_SCHEMA,
            errors=errors,
            description_placeholders={"host": self._host},
        )
    await self.async_set_unique_id(client.unique_id)
    self._abort_if_unique_id_configured(updates={CONF_HOST: self._host})
    # CONF_PASSWORD is stored as None for unprotected devices.
    return self.async_create_entry(
        title="Homevolt",
        data={CONF_HOST: self._host, CONF_PASSWORD: password},
    )

View File

@@ -7,5 +7,11 @@
"integration_type": "device",
"iot_class": "local_polling",
"quality_scale": "bronze",
"requirements": ["homevolt==0.4.4"]
"requirements": ["homevolt==0.4.4"],
"zeroconf": [
{
"name": "homevolt*",
"type": "_http._tcp.local."
}
]
}

View File

@@ -44,8 +44,8 @@ rules:
# Gold
devices: done
diagnostics: done
discovery-update-info: todo
discovery: todo
discovery-update-info: done
discovery: done
docs-data-update: todo
docs-examples: todo
docs-known-limitations: todo
@@ -54,9 +54,9 @@ rules:
docs-troubleshooting: todo
docs-use-cases: todo
dynamic-devices: todo
entity-category: todo
entity-category: done
entity-device-class: done
entity-disabled-by-default: todo
entity-disabled-by-default: done
entity-translations: done
exception-translations: todo
icon-translations: todo

View File

@@ -2,7 +2,10 @@
"config": {
"abort": {
"already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
"already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]",
"cannot_connect": "[%key:common::config_flow::error::cannot_connect%]",
"reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]",
"unknown": "[%key:common::config_flow::error::unknown%]",
"wrong_account": "The device you authenticated with is different from the one configured. Re-authenticate with the same Homevolt battery."
},
"error": {
@@ -38,6 +41,15 @@
"host": "The IP address or hostname of your Homevolt battery on your local network."
},
"description": "Connect Home Assistant to your Homevolt battery over the local network."
},
"zeroconf_confirm": {
"data": {
"password": "[%key:common::config_flow::data::password%]"
},
"data_description": {
"password": "[%key:component::homevolt::config::step::credentials::data_description::password%]"
},
"description": "Do you want to set up the Homevolt battery at {host}?"
}
}
},

View File

@@ -17,7 +17,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR]
PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.SWITCH]
async def async_setup_entry(hass: HomeAssistant, entry: LiebherrConfigEntry) -> bool:

View File

@@ -0,0 +1,66 @@
{
"entity": {
"switch": {
"night_mode": {
"default": "mdi:sleep",
"state": {
"off": "mdi:sleep-off"
}
},
"party_mode": {
"default": "mdi:glass-cocktail",
"state": {
"off": "mdi:glass-cocktail-off"
}
},
"supercool": {
"default": "mdi:snowflake",
"state": {
"off": "mdi:snowflake-off"
}
},
"supercool_bottom_zone": {
"default": "mdi:snowflake",
"state": {
"off": "mdi:snowflake-off"
}
},
"supercool_middle_zone": {
"default": "mdi:snowflake",
"state": {
"off": "mdi:snowflake-off"
}
},
"supercool_top_zone": {
"default": "mdi:snowflake",
"state": {
"off": "mdi:snowflake-off"
}
},
"superfrost": {
"default": "mdi:snowflake-alert",
"state": {
"off": "mdi:snowflake-off"
}
},
"superfrost_bottom_zone": {
"default": "mdi:snowflake-alert",
"state": {
"off": "mdi:snowflake-off"
}
},
"superfrost_middle_zone": {
"default": "mdi:snowflake-alert",
"state": {
"off": "mdi:snowflake-off"
}
},
"superfrost_top_zone": {
"default": "mdi:snowflake-alert",
"state": {
"off": "mdi:snowflake-off"
}
}
}
}
}

View File

@@ -158,7 +158,8 @@ class LiebherrNumber(LiebherrZoneEntity, NumberEntity):
except (LiebherrConnectionError, LiebherrTimeoutError) as err:
raise HomeAssistantError(
translation_domain=DOMAIN,
translation_key="set_temperature_failed",
translation_key="communication_error",
translation_placeholders={"error": str(err)},
) from err
await self.coordinator.async_request_refresh()

View File

@@ -55,11 +55,13 @@ rules:
docs-use-cases: done
dynamic-devices: todo
entity-category: done
entity-device-class: todo
entity-disabled-by-default: todo
entity-device-class: done
entity-disabled-by-default:
status: exempt
comment: This integration does not have any entities that should be disabled by default.
entity-translations: done
exception-translations: todo
icon-translations: todo
exception-translations: done
icon-translations: done
reconfiguration-flow:
status: exempt
comment: The only configuration option is the API key, which is handled by the reauthentication flow.

View File

@@ -57,11 +57,43 @@
"top_zone": {
"name": "Top zone"
}
},
"switch": {
"night_mode": {
"name": "Night mode"
},
"party_mode": {
"name": "Party mode"
},
"supercool": {
"name": "SuperCool"
},
"supercool_bottom_zone": {
"name": "Bottom zone SuperCool"
},
"supercool_middle_zone": {
"name": "Middle zone SuperCool"
},
"supercool_top_zone": {
"name": "Top zone SuperCool"
},
"superfrost": {
"name": "SuperFrost"
},
"superfrost_bottom_zone": {
"name": "Bottom zone SuperFrost"
},
"superfrost_middle_zone": {
"name": "Middle zone SuperFrost"
},
"superfrost_top_zone": {
"name": "Top zone SuperFrost"
}
}
},
"exceptions": {
"set_temperature_failed": {
"message": "Failed to set temperature"
"communication_error": {
"message": "An error occurred while communicating with the device: {error}"
}
}
}

View File

@@ -0,0 +1,255 @@
"""Switch platform for Liebherr integration."""
from __future__ import annotations
import asyncio
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
from typing import TYPE_CHECKING, Any
from pyliebherrhomeapi import (
LiebherrConnectionError,
LiebherrTimeoutError,
ToggleControl,
ZonePosition,
)
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN
from .coordinator import LiebherrConfigEntry, LiebherrCoordinator
from .entity import ZONE_POSITION_MAP, LiebherrEntity
# Limit concurrent entity commands to one at a time.
PARALLEL_UPDATES = 1
# Seconds to wait after sending a command before asking the coordinator
# to refresh, giving the appliance time to report its new state.
REFRESH_DELAY = 5

# Control names from the API
CONTROL_SUPERCOOL = "supercool"
CONTROL_SUPERFROST = "superfrost"
CONTROL_PARTY_MODE = "partymode"
CONTROL_NIGHT_MODE = "nightmode"
@dataclass(frozen=True, kw_only=True)
class LiebherrSwitchEntityDescription(SwitchEntityDescription):
    """Base description for Liebherr switch entities."""

    # API control name this entity maps to (one of the CONTROL_* constants).
    control_name: str
@dataclass(frozen=True, kw_only=True)
class LiebherrZoneSwitchEntityDescription(LiebherrSwitchEntityDescription):
    """Describes a Liebherr zone-based switch entity."""

    # Coroutine factory invoked as set_fn(coordinator, zone_id, value).
    set_fn: Callable[[LiebherrCoordinator, int, bool], Awaitable[None]]
@dataclass(frozen=True, kw_only=True)
class LiebherrDeviceSwitchEntityDescription(LiebherrSwitchEntityDescription):
    """Describes a Liebherr device-wide switch entity."""

    # Coroutine factory invoked as set_fn(coordinator, value) — no zone.
    set_fn: Callable[[LiebherrCoordinator, bool], Awaitable[None]]
# Zone-scoped toggle controls: keyed by the API control name, one switch
# entity is created per zone that reports the control.
ZONE_SWITCH_TYPES: dict[str, LiebherrZoneSwitchEntityDescription] = {
    CONTROL_SUPERCOOL: LiebherrZoneSwitchEntityDescription(
        key="supercool",
        translation_key="supercool",
        control_name=CONTROL_SUPERCOOL,
        set_fn=lambda coordinator, zone_id, value: coordinator.client.set_supercool(
            device_id=coordinator.device_id,
            zone_id=zone_id,
            value=value,
        ),
    ),
    CONTROL_SUPERFROST: LiebherrZoneSwitchEntityDescription(
        key="superfrost",
        translation_key="superfrost",
        control_name=CONTROL_SUPERFROST,
        set_fn=lambda coordinator, zone_id, value: coordinator.client.set_superfrost(
            device_id=coordinator.device_id,
            zone_id=zone_id,
            value=value,
        ),
    ),
}
# Device-wide toggle controls: keyed by the API control name, at most one
# switch entity per appliance.
DEVICE_SWITCH_TYPES: dict[str, LiebherrDeviceSwitchEntityDescription] = {
    CONTROL_PARTY_MODE: LiebherrDeviceSwitchEntityDescription(
        key="party_mode",
        translation_key="party_mode",
        control_name=CONTROL_PARTY_MODE,
        set_fn=lambda coordinator, value: coordinator.client.set_party_mode(
            device_id=coordinator.device_id,
            value=value,
        ),
    ),
    CONTROL_NIGHT_MODE: LiebherrDeviceSwitchEntityDescription(
        key="night_mode",
        translation_key="night_mode",
        control_name=CONTROL_NIGHT_MODE,
        set_fn=lambda coordinator, value: coordinator.client.set_night_mode(
            device_id=coordinator.device_id,
            value=value,
        ),
    ),
}
async def async_setup_entry(
    hass: HomeAssistant,
    entry: LiebherrConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Liebherr switch entities.

    Iterates every appliance coordinator and creates one switch per toggle
    control reported by the API: zone-bound controls (SuperCool/SuperFrost)
    become LiebherrZoneSwitch, device-wide ones (party/night mode) become
    LiebherrDeviceSwitch. Controls with unknown names are ignored.
    """
    entities: list[LiebherrDeviceSwitch | LiebherrZoneSwitch] = []
    for coordinator in entry.runtime_data.values():
        # Zone suffixes in entity names are only needed when the appliance
        # has more than one temperature zone.
        has_multiple_zones = len(coordinator.data.get_temperature_controls()) > 1
        for control in coordinator.data.controls:
            if not isinstance(control, ToggleControl):
                continue
            # Zone-based switches (SuperCool, SuperFrost)
            if control.zone_id is not None and (
                desc := ZONE_SWITCH_TYPES.get(control.name)
            ):
                entities.append(
                    LiebherrZoneSwitch(
                        coordinator=coordinator,
                        description=desc,
                        zone_id=control.zone_id,
                        has_multiple_zones=has_multiple_zones,
                    )
                )
            # Device-wide switches (Party Mode, Night Mode)
            elif device_desc := DEVICE_SWITCH_TYPES.get(control.name):
                entities.append(
                    LiebherrDeviceSwitch(
                        coordinator=coordinator,
                        description=device_desc,
                    )
                )
    async_add_entities(entities)
class LiebherrDeviceSwitch(LiebherrEntity, SwitchEntity):
    """Representation of a device-wide Liebherr switch.

    State is read from the matching ToggleControl in the coordinator data.
    After a command, the expected state is held locally ("optimistic") until
    the next coordinator refresh confirms it.
    """

    entity_description: LiebherrSwitchEntityDescription
    # Zone filter used by _toggle_control; None means device-wide.
    # Subclass LiebherrZoneSwitch narrows this to an int.
    _zone_id: int | None = None
    # Locally assumed state after a command; cleared on coordinator update.
    _optimistic_state: bool | None = None

    def __init__(
        self,
        coordinator: LiebherrCoordinator,
        description: LiebherrSwitchEntityDescription,
    ) -> None:
        """Initialize the device switch entity."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.device_id}_{description.key}"

    @property
    def _toggle_control(self) -> ToggleControl | None:
        """Get the toggle control for this entity.

        Returns the control matching this entity's control name (and zone,
        when _zone_id is set), or None if the API no longer reports it.
        """
        for control in self.coordinator.data.controls:
            if (
                isinstance(control, ToggleControl)
                and control.name == self.entity_description.control_name
                and (self._zone_id is None or control.zone_id == self._zone_id)
            ):
                return control
        return None

    @property
    def is_on(self) -> bool | None:
        """Return true if the switch is on."""
        if self._optimistic_state is not None:
            return self._optimistic_state
        # The TYPE_CHECKING assert is erased at runtime; `available` below
        # reports False when the control is missing, so HA should not read
        # this property in that case.
        if TYPE_CHECKING:
            assert self._toggle_control is not None
        return self._toggle_control.value

    # NOTE(review): coordinator-update handlers are conventionally decorated
    # with @callback in HA — confirm whether it should be added here.
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        # Fresh data supersedes the optimistic assumption.
        self._optimistic_state = None
        super()._handle_coordinator_update()

    @property
    def available(self) -> bool:
        """Return if entity is available."""
        return super().available and self._toggle_control is not None

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the switch on."""
        await self._async_set_value(True)

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the switch off."""
        await self._async_set_value(False)

    async def _async_call_set_fn(self, value: bool) -> None:
        """Call the set function for this switch."""
        if TYPE_CHECKING:
            assert isinstance(
                self.entity_description, LiebherrDeviceSwitchEntityDescription
            )
        await self.entity_description.set_fn(self.coordinator, value)

    async def _async_set_value(self, value: bool) -> None:
        """Set the switch value.

        Raises:
            HomeAssistantError: with a translated message when the API call
                fails with a connection or timeout error.
        """
        try:
            await self._async_call_set_fn(value)
        except (LiebherrConnectionError, LiebherrTimeoutError) as err:
            raise HomeAssistantError(
                translation_domain=DOMAIN,
                translation_key="communication_error",
                translation_placeholders={"error": str(err)},
            ) from err
        # Track expected state locally to avoid mutating shared coordinator data
        self._optimistic_state = value
        self.async_write_ha_state()
        # Give the appliance time to apply the change before re-polling.
        await asyncio.sleep(REFRESH_DELAY)
        await self.coordinator.async_request_refresh()
class LiebherrZoneSwitch(LiebherrDeviceSwitch):
    """Representation of a zone-based Liebherr switch.

    Specializes the device switch with a zone ID: the zone narrows both the
    control lookup (via the inherited _toggle_control) and the set call.
    """

    entity_description: LiebherrZoneSwitchEntityDescription
    # Narrowed from the base class's `int | None` — always set in __init__.
    _zone_id: int

    def __init__(
        self,
        coordinator: LiebherrCoordinator,
        description: LiebherrZoneSwitchEntityDescription,
        zone_id: int,
        has_multiple_zones: bool,
    ) -> None:
        """Initialize the zone switch entity.

        Args:
            coordinator: Per-appliance data coordinator.
            description: Zone switch description (includes the set_fn).
            zone_id: API zone this switch controls.
            has_multiple_zones: Whether the appliance has more than one
                temperature zone; controls the zone-suffixed entity name.
        """
        super().__init__(coordinator, description)
        self._zone_id = zone_id
        self._attr_unique_id = f"{coordinator.device_id}_{description.key}_{zone_id}"
        # Add zone suffix only for multi-zone devices
        if has_multiple_zones:
            temp_controls = coordinator.data.get_temperature_controls()
            # Use e.g. "supercool_top_zone" when the zone position is known;
            # otherwise fall back to the plain translation key.
            if (
                (tc := temp_controls.get(zone_id))
                and isinstance(tc.zone_position, ZonePosition)
                and (zone_key := ZONE_POSITION_MAP.get(tc.zone_position))
            ):
                self._attr_translation_key = f"{description.translation_key}_{zone_key}"

    async def _async_call_set_fn(self, value: bool) -> None:
        """Call the set function for this zone switch."""
        await self.entity_description.set_fn(self.coordinator, self._zone_id, value)

View File

@@ -8,6 +8,9 @@ BEDTIME_ALARM_MIN = "16:00"
BEDTIME_ALARM_MAX = "23:00"
BEDTIME_ALARM_DISABLE = "00:00"
BEDTIME_END_TIME_MIN = "05:00"
BEDTIME_END_TIME_MAX = "09:00"
APP_SETUP_URL = (
"https://www.nintendo.com/my/support/switch/parentalcontrols/app/setup.html"
)

View File

@@ -65,6 +65,9 @@
"time": {
"bedtime_alarm": {
"name": "Bedtime alarm"
},
"bedtime_end_time": {
"name": "Bedtime end time"
}
}
},
@@ -75,6 +78,9 @@
"bedtime_alarm_out_of_range": {
"message": "{value} not accepted. Bedtime Alarm must be between {bedtime_alarm_min} and {bedtime_alarm_max}. To disable, set to {bedtime_alarm_disable}."
},
"bedtime_end_time_out_of_range": {
"message": "{value} not accepted. Bedtime End Time must be between {bedtime_end_time_min} and {bedtime_end_time_max}. To disable, set to {bedtime_alarm_disable}."
},
"config_entry_not_found": {
"message": "Config entry not found."
},

View File

@@ -16,7 +16,14 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import BEDTIME_ALARM_DISABLE, BEDTIME_ALARM_MAX, BEDTIME_ALARM_MIN, DOMAIN
from .const import (
BEDTIME_ALARM_DISABLE,
BEDTIME_ALARM_MAX,
BEDTIME_ALARM_MIN,
BEDTIME_END_TIME_MAX,
BEDTIME_END_TIME_MIN,
DOMAIN,
)
from .coordinator import NintendoParentalControlsConfigEntry, NintendoUpdateCoordinator
from .entity import Device, NintendoDevice
@@ -30,6 +37,7 @@ class NintendoParentalControlsTime(StrEnum):
"""Store keys for Nintendo Parental time."""
BEDTIME_ALARM = "bedtime_alarm"
BEDTIME_END_TIME = "bedtime_end_time"
@dataclass(kw_only=True, frozen=True)
@@ -47,6 +55,12 @@ TIME_DESCRIPTIONS: tuple[NintendoParentalControlsTimeEntityDescription, ...] = (
value_fn=lambda device: device.bedtime_alarm,
set_value_fn=lambda device, value: device.set_bedtime_alarm(value=value),
),
NintendoParentalControlsTimeEntityDescription(
key=NintendoParentalControlsTime.BEDTIME_END_TIME,
translation_key=NintendoParentalControlsTime.BEDTIME_END_TIME,
value_fn=lambda device: device.bedtime_end,
set_value_fn=lambda device, value: device.set_bedtime_end_time(value=value),
),
)
@@ -88,6 +102,20 @@ class NintendoParentalControlsTimeEntity(NintendoDevice, TimeEntity):
try:
await self.entity_description.set_value_fn(self._device, value)
except BedtimeOutOfRangeError as exc:
if (
self.entity_description.key
== NintendoParentalControlsTime.BEDTIME_END_TIME
):
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="bedtime_end_time_out_of_range",
translation_placeholders={
"value": value.strftime("%H:%M"),
"bedtime_end_time_max": BEDTIME_END_TIME_MAX,
"bedtime_end_time_min": BEDTIME_END_TIME_MIN,
"bedtime_alarm_disable": BEDTIME_ALARM_DISABLE,
},
) from exc
raise ServiceValidationError(
translation_domain=DOMAIN,
translation_key="bedtime_alarm_out_of_range",

View File

@@ -290,7 +290,7 @@ class NumberDeviceClass(StrEnum):
OZONE = "ozone"
"""Amount of O3.
Unit of measurement: `ppb` (parts per billion), `μg/m³`
Unit of measurement: `ppb` (parts per billion), `ppm` (parts per million), `μg/m³`
"""
PH = "ph"
@@ -553,6 +553,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = {
NumberDeviceClass.NITROUS_OXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
NumberDeviceClass.OZONE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
NumberDeviceClass.PH: {None},

View File

@@ -9,5 +9,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["openai==2.15.0", "python-open-router==0.3.3"]
"requirements": ["openai==2.21.0", "python-open-router==0.3.3"]
}

View File

@@ -9,5 +9,5 @@
"integration_type": "service",
"iot_class": "cloud_polling",
"quality_scale": "bronze",
"requirements": ["openai==2.15.0"]
"requirements": ["openai==2.21.0"]
}

View File

@@ -304,7 +304,7 @@ class SensorDeviceClass(StrEnum):
OZONE = "ozone"
"""Amount of O3.
Unit of measurement: `ppb` (parts per billion),`μg/m³`
Unit of measurement: `ppb` (parts per billion), `ppm` (parts per million), `μg/m³`
"""
PH = "ph"
@@ -648,6 +648,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = {
SensorDeviceClass.NITROUS_OXIDE: {CONCENTRATION_MICROGRAMS_PER_CUBIC_METER},
SensorDeviceClass.OZONE: {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
},
SensorDeviceClass.PH: {None},

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
from dataclasses import dataclass, field
from dataclasses import dataclass
import logging
import aiohttp
@@ -23,7 +23,7 @@ from homeassistant.helpers.typing import ConfigType
from homeassistant.util import dt as dt_util, ssl as ssl_util
from .const import AUTH_IMPLEMENTATION, DATA_HASS_CONFIG, DOMAIN, TibberConfigEntry
from .coordinator import TibberDataAPICoordinator
from .coordinator import TibberDataAPICoordinator, TibberDataCoordinator
from .services import async_setup_services
PLATFORMS = [Platform.BINARY_SENSOR, Platform.NOTIFY, Platform.SENSOR]
@@ -38,7 +38,8 @@ class TibberRuntimeData:
"""Runtime data for Tibber API entries."""
session: OAuth2Session
data_api_coordinator: TibberDataAPICoordinator | None = field(default=None)
data_api_coordinator: TibberDataAPICoordinator
data_coordinator: TibberDataCoordinator
_client: tibber.Tibber | None = None
async def async_get_client(self, hass: HomeAssistant) -> tibber.Tibber:
@@ -100,8 +101,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: TibberConfigEntry) -> bo
except ClientError as err:
raise ConfigEntryNotReady from err
data_api_coordinator = TibberDataAPICoordinator(hass, entry)
data_coordinator = TibberDataCoordinator(hass, entry)
entry.runtime_data = TibberRuntimeData(
session=session,
data_api_coordinator=data_api_coordinator,
data_coordinator=data_coordinator,
)
tibber_connection = await entry.runtime_data.async_get_client(hass)
@@ -124,9 +129,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: TibberConfigEntry) -> bo
except tibber.FatalHttpExceptionError as err:
raise ConfigEntryNotReady("Fatal HTTP error from Tibber API") from err
coordinator = TibberDataAPICoordinator(hass, entry)
await coordinator.async_config_entry_first_refresh()
entry.runtime_data.data_api_coordinator = coordinator
await data_api_coordinator.async_config_entry_first_refresh()
await data_coordinator.async_config_entry_first_refresh()
await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS)
return True

View File

@@ -2,9 +2,11 @@
from __future__ import annotations
from datetime import timedelta
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime, timedelta
import logging
from typing import TYPE_CHECKING, cast
from typing import TYPE_CHECKING, Any, cast
from aiohttp.client_exceptions import ClientError
import tibber
@@ -21,9 +23,11 @@ from homeassistant.components.recorder.statistics import (
get_last_statistics,
statistics_during_period,
)
from homeassistant.const import UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EVENT_HOMEASSISTANT_STOP, UnitOfEnergy
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from homeassistant.util import dt as dt_util
from homeassistant.util.unit_conversion import EnergyConverter
@@ -31,6 +35,8 @@ from homeassistant.util.unit_conversion import EnergyConverter
from .const import DOMAIN
if TYPE_CHECKING:
from tibber import TibberHome
from .const import TibberConfigEntry
FIVE_YEARS = 5 * 365 * 24
@@ -38,7 +44,55 @@ FIVE_YEARS = 5 * 365 * 24
_LOGGER = logging.getLogger(__name__)
class TibberDataCoordinator(DataUpdateCoordinator[None]):
@dataclass
class TibberHomeData:
    """Per-home snapshot combining GraphQL info and price data.

    Sensor entities read these values both as attributes and via
    dict-style indexing (``data["peak"]``), which returns ``None`` for
    unknown keys instead of raising.
    """

    # Pricing metadata.
    currency: str
    price_unit: str
    # Current electricity price and when it applies.
    current_price: float | None
    current_price_time: datetime | None
    intraday_price_ranking: float | None
    # Today's price statistics.
    max_price: float
    avg_price: float
    min_price: float
    off_peak_1: float
    peak: float
    off_peak_2: float
    # Monthly figures; None when the API did not provide them.
    month_cost: float | None
    peak_hour: float | None
    peak_hour_time: datetime | None
    month_cons: float | None

    def __getitem__(self, key: str) -> Any:
        """Look up a field by name, yielding None when absent."""
        return vars(self).get(key)
def _build_home_data(home: TibberHome) -> TibberHomeData:
    """Create a TibberHomeData snapshot from a fetched TibberHome.

    Expects price info to have been fetched already; daily statistics fall
    back to 0.0 and monthly figures to None when the home lacks them.
    """
    current_price, current_time, ranking = home.current_price_data()
    daily = home.current_attributes()
    # Monthly attributes are not guaranteed to exist on every TibberHome.
    monthly = {
        name: getattr(home, name, None)
        for name in ("month_cost", "peak_hour", "peak_hour_time", "month_cons")
    }
    return TibberHomeData(
        currency=home.currency,
        price_unit=home.price_unit,
        current_price=current_price,
        current_price_time=current_time,
        intraday_price_ranking=ranking,
        max_price=daily.get("max_price", 0.0),
        avg_price=daily.get("avg_price", 0.0),
        min_price=daily.get("min_price", 0.0),
        off_peak_1=daily.get("off_peak_1", 0.0),
        peak=daily.get("peak", 0.0),
        off_peak_2=daily.get("off_peak_2", 0.0),
        **monthly,
    )
class TibberDataCoordinator(DataUpdateCoordinator[dict[str, TibberHomeData]]):
"""Handle Tibber data and insert statistics."""
config_entry: TibberConfigEntry
@@ -46,36 +100,84 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]):
def __init__(
self,
hass: HomeAssistant,
config_entry: TibberConfigEntry,
tibber_connection: tibber.Tibber,
entry: TibberConfigEntry,
) -> None:
"""Initialize the data handler."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=f"Tibber {tibber_connection.name}",
update_interval=timedelta(minutes=20),
config_entry=entry,
name="Tibber",
)
self._tibber_connection = tibber_connection
self._listener_unsub: Callable[[], None] | None = None
async def _async_update_data(self) -> None:
"""Update data via API."""
def _get_next_15_interval(self) -> datetime:
    """Return the next 15-minute boundary (minutes 0, 15, 30, 45) in UTC."""
    # Jump a quarter-hour ahead, then floor the minutes to a multiple of 15.
    target = dt_util.utcnow() + timedelta(minutes=15)
    floored = target.minute - target.minute % 15
    return target.replace(
        minute=floored, second=0, microsecond=0, tzinfo=dt_util.UTC
    )
@callback
def _on_scheduled_refresh(self, _fire_time: datetime) -> None:
    """Run the scheduled refresh (same contract as base refresh interval).

    Fired by the point-in-time listener armed in _schedule_refresh; the
    actual refresh runs as a background task tied to the config entry.
    """
    # NOTE(review): _handle_refresh_interval is a private
    # DataUpdateCoordinator method; confirm it remains stable across the
    # supported HA versions.
    self.config_entry.async_create_background_task(
        self.hass,
        self._handle_refresh_interval(),
        name=f"{self.name} - {self.config_entry.title} - refresh",
        eager_start=True,
    )
@callback
def _schedule_refresh(self) -> None:
    """Schedule a refresh at the next 15-minute boundary.

    Presumably overrides the base coordinator's interval-based scheduling so
    updates align with quarter-hour price periods — confirm the override
    point against the installed DataUpdateCoordinator.
    """
    # Honor the user's "disable polling" preference on the config entry.
    if self.config_entry.pref_disable_polling:
        return
    # Cancel any pending timer before arming a new one.
    # NOTE(review): _async_unsub_refresh/_unsub_refresh are private
    # DataUpdateCoordinator members.
    self._async_unsub_refresh()
    self._unsub_refresh = async_track_point_in_utc_time(
        self.hass,
        self._on_scheduled_refresh,
        self._get_next_15_interval(),
    )
async def _async_update_data(self) -> dict[str, TibberHomeData]:
    """Update data via API and return per-home data for sensors.

    Fetches consumption/production data, refreshes per-home info and price
    data when the cached prices run out within ~11 hours, inserts long-term
    statistics, and returns a TibberHomeData snapshot keyed by home ID.

    Raises:
        UpdateFailed: when the Tibber API reports a retryable or fatal
            HTTP error.
    """
    # Fix: this was a leftover debug line logged at ERROR level on every
    # scheduled poll; downgrade to debug.
    _LOGGER.debug("Updating Tibber data")
    tibber_connection = await self.config_entry.runtime_data.async_get_client(
        self.hass
    )
    try:
        await tibber_connection.fetch_consumption_data_active_homes()
        await tibber_connection.fetch_production_data_active_homes()
        now = dt_util.now()
        for home in tibber_connection.get_homes(only_active=True):
            last_data_timestamp = home.last_data_timestamp
            # Refresh info/prices when no data exists or the cached data
            # covers less than 11 hours ahead.
            if (
                last_data_timestamp is None
                or (last_data_timestamp - now).total_seconds() < 11 * 3600
            ):
                await home.update_info_and_price_info()
        await self._insert_statistics(tibber_connection)
    except (
        tibber.RetryableHttpExceptionError,
        tibber.FatalHttpExceptionError,
    ) as err:
        # Both error types surface as a failed update; the status code is
        # included for diagnostics.
        raise UpdateFailed(f"Error communicating with API ({err.status})") from err
    result: dict[str, TibberHomeData] = {}
    for home in tibber_connection.get_homes(only_active=True):
        result[home.home_id] = _build_home_data(home)
    return result
async def _insert_statistics(self, tibber_connection: tibber.Tibber) -> None:
"""Insert Tibber statistics."""
for home in self._tibber_connection.get_homes():
for home in tibber_connection.get_homes():
sensors: list[tuple[str, bool, str | None, str]] = []
if home.hourly_consumption_data:
sensors.append(
@@ -212,7 +314,6 @@ class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
update_interval=timedelta(minutes=1),
config_entry=entry,
)
self._runtime_data = entry.runtime_data
self.sensors_by_device: dict[str, dict[str, tibber.data_api.Sensor]] = {}
def _build_sensor_lookup(self, devices: dict[str, TibberDevice]) -> None:
@@ -233,7 +334,7 @@ class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
async def _async_get_client(self) -> tibber.Tibber:
"""Get the Tibber client with error handling."""
try:
return await self._runtime_data.async_get_client(self.hass)
return await self.config_entry.runtime_data.async_get_client(self.hass)
except ConfigEntryAuthFailed:
raise
except (ClientError, TimeoutError, tibber.UserAgentMissingError) as err:
@@ -257,3 +358,48 @@ class TibberDataAPICoordinator(DataUpdateCoordinator[dict[str, TibberDevice]]):
) from err
self._build_sensor_lookup(devices)
return devices
class TibberRtDataCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    """Handle Tibber realtime data.

    Push-based coordinator for one Tibber home: no update_interval is set;
    data arrives via the coordinator's listener mechanism and each update is
    forwarded to the sensor platform through add_sensor_callback.
    """

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: ConfigEntry,
        add_sensor_callback: Callable[[TibberRtDataCoordinator, Any], None],
        tibber_home: TibberHome,
    ) -> None:
        """Initialize the data handler.

        Args:
            hass: Home Assistant instance.
            config_entry: Owning Tibber config entry.
            add_sensor_callback: Invoked with (coordinator, live_measurement)
                on every usable realtime update.
            tibber_home: Home whose address names this coordinator.
        """
        self._add_sensor_callback = add_sensor_callback
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            # Name the coordinator after the home's street address when known.
            name=tibber_home.info["viewer"]["home"]["address"].get(
                "address1", "Tibber"
            ),
        )
        # Subscribe our own handler so each data update triggers the
        # sensor callback; keep the unsubscribe handle for shutdown.
        self._async_remove_device_updates_handler = self.async_add_listener(
            self._data_updated
        )
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._handle_ha_stop)

    @callback
    def _handle_ha_stop(self, _event: Event) -> None:
        """Handle Home Assistant stopping."""
        # Detach the update listener so no callbacks fire during shutdown.
        self._async_remove_device_updates_handler()

    @callback
    def _data_updated(self) -> None:
        """Triggered when data is updated."""
        # Only forward updates that contain a live measurement payload.
        if live_measurement := self.get_live_measurement():
            self._add_sensor_callback(self, live_measurement)

    def get_live_measurement(self) -> Any:
        """Get live measurement data.

        Returns the GraphQL ``data.liveMeasurement`` payload, or None when
        the subscription message carried errors (the first error is logged).
        """
        if errors := self.data.get("errors"):
            _LOGGER.error(errors[0])
            return None
        return self.data.get("data", {}).get("liveMeasurement")

View File

@@ -0,0 +1,117 @@
"""Shared entity base for Tibber sensors."""
from __future__ import annotations
from datetime import datetime, timedelta
from typing import TYPE_CHECKING, cast
from homeassistant.components.sensor import SensorEntityDescription
from homeassistant.core import callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from homeassistant.util import dt as dt_util
from .const import DOMAIN
from .coordinator import TibberDataCoordinator, TibberHomeData, TibberRtDataCoordinator
if TYPE_CHECKING:
from tibber import TibberHome
class TibberCoordinatorEntity(CoordinatorEntity[TibberDataCoordinator]):
    """Base entity for Tibber sensors backed by a TibberDataCoordinator."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: TibberDataCoordinator,
        tibber_home: TibberHome,
    ) -> None:
        """Initialize the entity."""
        super().__init__(coordinator)
        self._tibber_home = tibber_home
        home_id = tibber_home.home_id
        # Fall back to the home id when the home has no name.
        self._home_name: str = tibber_home.name or home_id
        self._device_name: str = self._home_name
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, home_id)},
            name=self._device_name,
            model="Tibber Pulse",
        )

    def _get_home_data(self) -> TibberHomeData | None:
        """Return cached home data from the coordinator, if available."""
        coordinator_data = cast(
            dict[str, TibberHomeData] | None, self.coordinator.data
        )
        if coordinator_data is None:
            return None
        return coordinator_data.get(self._tibber_home.home_id)
class TibberRTCoordinatorEntity(CoordinatorEntity[TibberRtDataCoordinator]):
    """Representation of a Tibber sensor for real time consumption.

    State is pushed from the realtime coordinator; the entity starts from
    ``initial_state`` and is updated in _handle_coordinator_update.
    """

    _attr_has_entity_name = True

    def __init__(
        self,
        tibber_home: TibberHome,
        description: SensorEntityDescription,
        initial_state: float,
        coordinator: TibberRtDataCoordinator,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)
        self._tibber_home = tibber_home
        # Fall back to the home id when the home has no name.
        self._home_name: str = tibber_home.name or tibber_home.home_id
        model: str = "Tibber Pulse"
        self._device_name: str = f"{model} {self._home_name}"
        self.entity_description = description
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, self._tibber_home.home_id)},
            name=self._device_name,
            model=model,
        )
        self._attr_native_value = initial_state
        self._attr_last_reset: datetime | None = None
        self._attr_unique_id = f"{self._tibber_home.home_id}_rt_{description.key}"
        # Monetary accumulators are reported in the home's currency.
        if description.key in ("accumulatedCost", "accumulatedReward"):
            self._attr_native_unit_of_measurement = tibber_home.currency

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        return self._tibber_home.rt_subscription_running

    @callback
    def _handle_coordinator_update(self) -> None:
        """Update state from the latest live measurement pushed by the coordinator."""
        if not (live_measurement := self.coordinator.get_live_measurement()):
            return
        state = live_measurement.get(self.entity_description.key)
        if state is None:
            return
        if self.entity_description.key in (
            "accumulatedConsumption",
            "accumulatedProduction",
        ):
            # Value is reset to 0 at midnight, but not always strictly increasing
            # due to hourly corrections.
            # If device is offline, last_reset should be updated when it comes
            # back online if the value has decreased
            ts_local = dt_util.parse_datetime(live_measurement["timestamp"])
            if ts_local is not None:
                # A drop below half of the previous value around midnight (or
                # more than 24h since the last reset) is treated as a reset.
                if self._attr_last_reset is None or (
                    state < 0.5 * self._attr_native_value
                    and (
                        ts_local.hour == 0
                        or (ts_local - self._attr_last_reset) > timedelta(hours=24)
                    )
                ):
                    self._attr_last_reset = dt_util.as_utc(
                        ts_local.replace(hour=0, minute=0, second=0, microsecond=0)
                    )
        # powerFactor is delivered as a fraction; expose it as a percentage.
        if self.entity_description.key == "powerFactor":
            state *= 100.0
        self._attr_native_value = state
        self.async_write_ha_state()

View File

@@ -2,12 +2,8 @@
from __future__ import annotations
from collections.abc import Callable
import datetime
from datetime import timedelta
import logging
from random import randrange
from typing import Any
from typing import Any, cast
import aiohttp
from tibber import FatalHttpExceptionError, RetryableHttpExceptionError, TibberHome
@@ -19,9 +15,7 @@ from homeassistant.components.sensor import (
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
EVENT_HOMEASSISTANT_STOP,
PERCENTAGE,
SIGNAL_STRENGTH_DECIBELS,
EntityCategory,
@@ -32,28 +26,25 @@ from homeassistant.const import (
UnitOfPower,
UnitOfTemperature,
)
from homeassistant.core import Event, HomeAssistant, callback
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
)
from homeassistant.util import Throttle, dt as dt_util
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, MANUFACTURER, TibberConfigEntry
from .coordinator import TibberDataAPICoordinator, TibberDataCoordinator
from .const import DOMAIN, TibberConfigEntry
from .coordinator import (
TibberDataAPICoordinator,
TibberDataCoordinator,
TibberRtDataCoordinator,
)
from .entity import TibberCoordinatorEntity, TibberRTCoordinatorEntity
_LOGGER = logging.getLogger(__name__)
ICON = "mdi:currency-usd"
SCAN_INTERVAL = timedelta(minutes=1)
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=5)
PARALLEL_UPDATES = 0
TWENTY_MINUTES = 20 * 60
RT_SENSORS_UNIQUE_ID_MIGRATION = {
"accumulated_consumption_last_hour": "accumulated consumption current hour",
@@ -260,9 +251,56 @@ SENSORS: tuple[SensorEntityDescription, ...] = (
native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
state_class=SensorStateClass.TOTAL_INCREASING,
),
SensorEntityDescription(
key="current_price",
translation_key="electricity_price",
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=3,
),
SensorEntityDescription(
key="max_price",
translation_key="max_price",
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=3,
),
SensorEntityDescription(
key="avg_price",
translation_key="avg_price",
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=3,
),
SensorEntityDescription(
key="min_price",
translation_key="min_price",
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=3,
),
SensorEntityDescription(
key="off_peak_1",
translation_key="off_peak_1",
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=3,
),
SensorEntityDescription(
key="peak",
translation_key="peak",
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=3,
),
SensorEntityDescription(
key="off_peak_2",
translation_key="off_peak_2",
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=3,
),
SensorEntityDescription(
key="intraday_price_ranking",
translation_key="intraday_price_ranking",
state_class=SensorStateClass.MEASUREMENT,
suggested_display_precision=2,
),
)
DATA_API_SENSORS: tuple[SensorEntityDescription, ...] = (
SensorEntityDescription(
key="cellular.rssi",
@@ -609,7 +647,7 @@ async def _async_setup_graphql_sensors(
entity_registry = er.async_get(hass)
coordinator: TibberDataCoordinator | None = None
coordinator: TibberDataCoordinator = entry.runtime_data.data_coordinator
entities: list[TibberSensor] = []
for home in tibber_connection.get_homes(only_active=False):
try:
@@ -626,13 +664,7 @@ async def _async_setup_graphql_sensors(
raise PlatformNotReady from err
if home.has_active_subscription:
entities.append(TibberSensorElPrice(home))
if coordinator is None:
coordinator = TibberDataCoordinator(hass, entry, tibber_connection)
entities.extend(
TibberDataSensor(home, coordinator, entity_description)
for entity_description in SENSORS
)
entities.extend(TibberSensor(home, coordinator, desc) for desc in SENSORS)
if home.has_real_time_consumption:
entity_creator = TibberRtEntityCreator(
@@ -657,8 +689,6 @@ def _setup_data_api_sensors(
"""Set up sensors backed by the Tibber Data API."""
coordinator = entry.runtime_data.data_api_coordinator
if coordinator is None:
return
entities: list[TibberDataAPISensor] = []
api_sensors = {sensor.key: sensor for sensor in DATA_API_SENSORS}
@@ -707,116 +737,8 @@ class TibberDataAPISensor(CoordinatorEntity[TibberDataAPICoordinator], SensorEnt
return sensor.value if sensor else None
class TibberSensor(SensorEntity):
    """Base class for Tibber sensors bound to a single home."""

    _attr_has_entity_name = True

    def __init__(self, *args: Any, tibber_home: TibberHome, **kwargs: Any) -> None:
        """Initialize the sensor."""
        super().__init__(*args, **kwargs)
        self._tibber_home = tibber_home
        home_info = tibber_home.info["viewer"]["home"]
        # Prefer the user-set nickname; fall back to the first address line.
        nickname = home_info["appNickname"]
        if nickname is None:
            nickname = home_info["address"].get("address1", "")
        self._home_name = nickname
        self._device_name: str | None = None
        self._model: str | None = None

    @property
    def device_info(self) -> DeviceInfo:
        """Return the device_info of the device."""
        info = DeviceInfo(
            identifiers={(DOMAIN, self._tibber_home.home_id)},
            name=self._device_name,
            manufacturer=MANUFACTURER,
        )
        # Model is optional; only set it when a subclass provided one.
        if self._model is not None:
            info["model"] = self._model
        return info
class TibberSensorElPrice(TibberSensor):
    """Representation of a Tibber sensor for el price.

    Polls the Tibber API (throttled, with a random per-instance load-spread
    offset) and exposes the current electricity price plus daily statistics
    as extra state attributes.
    """

    _attr_state_class = SensorStateClass.MEASUREMENT
    _attr_translation_key = "electricity_price"

    def __init__(self, tibber_home: TibberHome) -> None:
        """Initialize the sensor."""
        super().__init__(tibber_home=tibber_home)
        self._last_updated: datetime.datetime | None = None
        # Random 0..20 min offset so all instances don't hit the API at once.
        self._spread_load_constant = randrange(TWENTY_MINUTES)

        self._attr_available = False
        self._attr_extra_state_attributes = {
            "app_nickname": None,
            "grid_company": None,
            "estimated_annual_consumption": None,
            "max_price": None,
            "avg_price": None,
            "min_price": None,
            "off_peak_1": None,
            "peak": None,
            "off_peak_2": None,
            "intraday_price_ranking": None,
        }
        self._attr_icon = ICON
        self._attr_unique_id = self._tibber_home.home_id
        self._model = "Price Sensor"
        self._device_name = self._home_name

    async def async_update(self) -> None:
        """Get the latest data and updates the states."""
        now = dt_util.now()
        if (
            not self._tibber_home.last_data_timestamp
            or (self._tibber_home.last_data_timestamp - now).total_seconds()
            < 10 * 3600 - self._spread_load_constant
            or not self.available
        ):
            _LOGGER.debug("Asking for new data")
            await self._fetch_data()
        elif (
            self._tibber_home.price_total
            and self._last_updated
            and self._last_updated.hour == now.hour
            and now - self._last_updated < timedelta(minutes=15)
            and self._tibber_home.last_data_timestamp
        ):
            # Price for the current hour is still fresh; nothing to do.
            return

        res = self._tibber_home.current_price_data()
        self._attr_native_value, self._last_updated, price_rank = res
        self._attr_extra_state_attributes["intraday_price_ranking"] = price_rank

        attrs = self._tibber_home.current_attributes()
        self._attr_extra_state_attributes.update(attrs)
        self._attr_available = self._attr_native_value is not None
        self._attr_native_unit_of_measurement = self._tibber_home.price_unit

    @Throttle(MIN_TIME_BETWEEN_UPDATES)
    async def _fetch_data(self) -> None:
        """Fetch home and price info, silently skipping transient network errors."""
        _LOGGER.debug("Fetching data")
        try:
            await self._tibber_home.update_info_and_price_info()
        # Bug fix: `except TimeoutError, aiohttp.ClientError:` is Python 2
        # syntax and a SyntaxError in Python 3 — multiple exception types
        # must be given as a parenthesized tuple.
        except (TimeoutError, aiohttp.ClientError):
            return
        data = self._tibber_home.info["viewer"]["home"]
        self._attr_extra_state_attributes["app_nickname"] = data["appNickname"]
        self._attr_extra_state_attributes["grid_company"] = data["meteringPointData"][
            "gridCompany"
        ]
        self._attr_extra_state_attributes["estimated_annual_consumption"] = data[
            "meteringPointData"
        ]["estimatedAnnualConsumption"]
class TibberDataSensor(TibberSensor, CoordinatorEntity[TibberDataCoordinator]):
"""Representation of a Tibber sensor."""
class TibberSensor(TibberCoordinatorEntity, SensorEntity):
"""Representation of a Tibber sensor reading from coordinator data."""
def __init__(
self,
@@ -827,80 +749,55 @@ class TibberDataSensor(TibberSensor, CoordinatorEntity[TibberDataCoordinator]):
"""Initialize the sensor."""
super().__init__(coordinator=coordinator, tibber_home=tibber_home)
self.entity_description = entity_description
self._attr_unique_id = (
f"{self._tibber_home.home_id}_{self.entity_description.key}"
)
if entity_description.key == "month_cost":
self._attr_native_unit_of_measurement = self._tibber_home.currency
if self.entity_description.key == "current_price":
# Preserve the existing unique ID for the electricity price
# entity to avoid breaking user setups.
self._attr_unique_id = self._tibber_home.home_id
else:
self._attr_unique_id = (
f"{self._tibber_home.home_id}_{self.entity_description.key}"
)
self._device_name = self._home_name
@property
def native_value(self) -> StateType:
"""Return the value of the sensor."""
return getattr(self._tibber_home, self.entity_description.key) # type: ignore[no-any-return]
class TibberSensorRT(TibberSensor, CoordinatorEntity["TibberRtDataCoordinator"]):
"""Representation of a Tibber sensor for real time consumption."""
def __init__(
self,
tibber_home: TibberHome,
description: SensorEntityDescription,
initial_state: float,
coordinator: TibberRtDataCoordinator,
) -> None:
"""Initialize the sensor."""
super().__init__(coordinator=coordinator, tibber_home=tibber_home)
self.entity_description = description
self._model = "Tibber Pulse"
self._device_name = f"{self._model} {self._home_name}"
self._attr_native_value = initial_state
self._attr_unique_id = f"{self._tibber_home.home_id}_rt_{description.key}"
if description.key in ("accumulatedCost", "accumulatedReward"):
self._attr_native_unit_of_measurement = tibber_home.currency
def available(self) -> bool:
"""Return whether the sensor is available."""
return super().available and self._get_home_data() is not None
@property
def available(self) -> bool:
"""Return True if entity is available."""
return self._tibber_home.rt_subscription_running
def native_value(self) -> StateType:
"""Return the value of the sensor from coordinator data."""
_LOGGER.error("native_value: %s", self.entity_description.key)
home_data = self._get_home_data()
if home_data is None:
return None
return cast(StateType, home_data[self.entity_description.key])
@callback
def _handle_coordinator_update(self) -> None:
if not (live_measurement := self.coordinator.get_live_measurement()):
return
state = live_measurement.get(self.entity_description.key)
if state is None:
return
if self.entity_description.key in (
"accumulatedConsumption",
"accumulatedProduction",
@property
def native_unit_of_measurement(self) -> str | None:
"""Return the unit from coordinator data for monetary sensors."""
if (
self.entity_description.device_class == SensorDeviceClass.MONETARY
or self.entity_description.key
in (
"current_price",
"max_price",
"avg_price",
"min_price",
"off_peak_1",
"peak",
"off_peak_2",
)
):
# Value is reset to 0 at midnight, but not always strictly increasing
# due to hourly corrections.
# If device is offline, last_reset should be updated when it comes
# back online if the value has decreased
ts_local = dt_util.parse_datetime(live_measurement["timestamp"])
if ts_local is not None:
if self.last_reset is None or (
# native_value is float
state < 0.5 * self.native_value # type: ignore[operator]
and (
ts_local.hour == 0
or (ts_local - self.last_reset) > timedelta(hours=24)
)
):
self._attr_last_reset = dt_util.as_utc(
ts_local.replace(hour=0, minute=0, second=0, microsecond=0)
)
if self.entity_description.key == "powerFactor":
state *= 100.0
self._attr_native_value = state
self.async_write_ha_state()
home_data = self._get_home_data()
if home_data is None:
return None
return (
home_data.currency
if self.entity_description.device_class == SensorDeviceClass.MONETARY
else home_data.price_unit
)
return self.entity_description.native_unit_of_measurement
class TibberRtEntityCreator:
@@ -975,7 +872,7 @@ class TibberRtEntityCreator:
continue
self._migrate_unique_id(sensor_description)
entity = TibberSensorRT(
entity = TibberRTCoordinatorEntity(
self._tibber_home,
sensor_description,
state,
@@ -985,48 +882,3 @@ class TibberRtEntityCreator:
self._added_sensors.add(sensor_description.key)
if new_entities:
self._async_add_entities(new_entities)
class TibberRtDataCoordinator(DataUpdateCoordinator): # pylint: disable=hass-enforce-class-module
    """Handle Tibber realtime data.

    Listens for coordinator data updates and forwards each new live
    measurement payload to the sensor-creation callback.
    """

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: ConfigEntry,
        add_sensor_callback: Callable[[TibberRtDataCoordinator, Any], None],
        tibber_home: TibberHome,
    ) -> None:
        """Initialize the data handler.

        add_sensor_callback is invoked as (coordinator, live_measurement)
        whenever new realtime data arrives.
        """
        self._add_sensor_callback = add_sensor_callback
        super().__init__(
            hass,
            _LOGGER,
            config_entry=config_entry,
            # Coordinator name: the home's first address line, if present.
            name=tibber_home.info["viewer"]["home"]["address"].get(
                "address1", "Tibber"
            ),
        )
        # Keep the remove handle so the listener can be detached on shutdown.
        self._async_remove_device_updates_handler = self.async_add_listener(
            self._data_updated
        )
        hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._handle_ha_stop)

    @callback
    def _handle_ha_stop(self, _event: Event) -> None:
        """Handle Home Assistant stopping."""
        # Detach the update listener so no callbacks fire during shutdown.
        self._async_remove_device_updates_handler()

    @callback
    def _data_updated(self) -> None:
        """Triggered when data is updated."""
        if live_measurement := self.get_live_measurement():
            self._add_sensor_callback(self, live_measurement)

    def get_live_measurement(self) -> Any:
        """Get live measurement data."""
        # GraphQL-style payload: log the first error and return None,
        # otherwise drill into data.liveMeasurement (which may be None).
        if errors := self.data.get("errors"):
            _LOGGER.error(errors[0])
            return None
        return self.data.get("data", {}).get("liveMeasurement")

View File

@@ -43,6 +43,9 @@
"average_power": {
"name": "Average power"
},
"avg_price": {
"name": "Average price today"
},
"cellular_rssi": {
"name": "Cellular signal strength"
},
@@ -136,6 +139,9 @@
"grid_phase_count": {
"name": "Number of grid phases"
},
"intraday_price_ranking": {
"name": "Intraday price ranking"
},
"last_meter_consumption": {
"name": "Last meter consumption"
},
@@ -145,15 +151,30 @@
"max_power": {
"name": "Max power"
},
"max_price": {
"name": "Max price today"
},
"min_power": {
"name": "Min power"
},
"min_price": {
"name": "Min price today"
},
"month_cons": {
"name": "Monthly net consumption"
},
"month_cost": {
"name": "Monthly cost"
},
"off_peak_1": {
"name": "Off-peak 1 average"
},
"off_peak_2": {
"name": "Off-peak 2 average"
},
"peak": {
"name": "Peak average"
},
"peak_hour": {
"name": "Monthly peak hour consumption"
},

View File

@@ -11,5 +11,6 @@ PLATFORMS = [
Platform.COVER,
Platform.LIGHT,
Platform.SCENE,
Platform.SWITCH,
]
LOGGER = getLogger(__package__)

View File

@@ -0,0 +1,53 @@
"""Support for Velux switches."""
from __future__ import annotations
from typing import Any
from pyvlx import OnOffSwitch
from homeassistant.components.switch import SwitchEntity
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import VeluxConfigEntry
from .entity import VeluxEntity, wrap_pyvlx_call_exceptions
PARALLEL_UPDATES = 1
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: VeluxConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up switch(es) for Velux platform."""
    pyvlx = config_entry.runtime_data
    # Only PyVLX on/off switch nodes become switch entities.
    switches = [
        VeluxOnOffSwitch(node, config_entry.entry_id)
        for node in pyvlx.nodes
        if isinstance(node, OnOffSwitch)
    ]
    async_add_entities(switches)
class VeluxOnOffSwitch(VeluxEntity, SwitchEntity):
    """A Velux on/off switch node exposed as a Home Assistant switch."""

    # Use the device name as the entity name.
    _attr_name = None
    node: OnOffSwitch

    @property
    def is_on(self) -> bool:
        """Return true if switch is on."""
        node = self.node
        return node.is_on()

    @wrap_pyvlx_call_exceptions
    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the switch off."""
        await self.node.set_off()

    @wrap_pyvlx_call_exceptions
    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the switch on."""
        await self.node.set_on()

View File

@@ -593,6 +593,10 @@ ZEROCONF = {
"domain": "hdfury",
"name": "vrroom-*",
},
{
"domain": "homevolt",
"name": "homevolt*",
},
{
"domain": "lektrico",
"name": "lektrico*",

View File

@@ -47,7 +47,7 @@ ifaddr==0.2.0
Jinja2==3.1.6
lru-dict==1.3.0
mutagen==1.47.0
openai==2.15.0
openai==2.21.0
orjson==3.11.5
packaging>=23.1
paho-mqtt==2.1.0

View File

@@ -526,12 +526,14 @@ class OzoneConcentrationConverter(BaseUnitConverter):
UNIT_CLASS = "ozone"
_UNIT_CONVERSION: dict[str | None, float] = {
CONCENTRATION_PARTS_PER_BILLION: 1e9,
CONCENTRATION_PARTS_PER_MILLION: 1e6,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER: (
_OZONE_MOLAR_MASS / _AMBIENT_IDEAL_GAS_MOLAR_VOLUME * 1e6
),
}
VALID_UNITS = {
CONCENTRATION_PARTS_PER_BILLION,
CONCENTRATION_PARTS_PER_MILLION,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
}

10
mypy.ini generated
View File

@@ -595,6 +595,16 @@ disallow_untyped_defs = true
warn_return_any = true
warn_unreachable = true
[mypy-homeassistant.components.anthropic.*]
check_untyped_defs = true
disallow_incomplete_defs = true
disallow_subclassing_any = true
disallow_untyped_calls = true
disallow_untyped_decorators = true
disallow_untyped_defs = true
warn_return_any = true
warn_unreachable = true
[mypy-homeassistant.components.apache_kafka.*]
check_untyped_defs = true
disallow_incomplete_defs = true

2
requirements_all.txt generated
View File

@@ -1681,7 +1681,7 @@ open-meteo==0.3.2
# homeassistant.components.cloud
# homeassistant.components.open_router
# homeassistant.components.openai_conversation
openai==2.15.0
openai==2.21.0
# homeassistant.components.openerz
openerz-api==0.3.0

View File

@@ -1467,7 +1467,7 @@ open-meteo==0.3.2
# homeassistant.components.cloud
# homeassistant.components.open_router
# homeassistant.components.openai_conversation
openai==2.15.0
openai==2.21.0
# homeassistant.components.openerz
openerz-api==0.3.0

View File

@@ -21,16 +21,6 @@ from anthropic.types.raw_message_delta_event import Delta
import pytest
from homeassistant.components.anthropic.const import (
CONF_CHAT_MODEL,
CONF_THINKING_BUDGET,
CONF_THINKING_EFFORT,
CONF_WEB_SEARCH,
CONF_WEB_SEARCH_CITY,
CONF_WEB_SEARCH_COUNTRY,
CONF_WEB_SEARCH_MAX_USES,
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
DEFAULT_AI_TASK_NAME,
DEFAULT_CONVERSATION_NAME,
)
@@ -84,79 +74,6 @@ def mock_config_entry_with_assist(
return mock_config_entry
@pytest.fixture
def mock_config_entry_with_extended_thinking(
hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> MockConfigEntry:
"""Mock a config entry with extended thinking."""
hass.config_entries.async_update_subentry(
mock_config_entry,
next(iter(mock_config_entry.subentries.values())),
data={
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
CONF_CHAT_MODEL: "claude-3-7-sonnet-latest",
CONF_THINKING_BUDGET: 1500,
},
)
return mock_config_entry
@pytest.fixture
def mock_config_entry_with_adaptive_thinking(
hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> MockConfigEntry:
"""Mock a config entry with adaptive thinking."""
hass.config_entries.async_update_subentry(
mock_config_entry,
next(iter(mock_config_entry.subentries.values())),
data={
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
CONF_CHAT_MODEL: "claude-opus-4-6",
CONF_THINKING_EFFORT: "medium",
},
)
return mock_config_entry
@pytest.fixture
def mock_config_entry_with_web_search(
hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> MockConfigEntry:
"""Mock a config entry with server tools enabled."""
hass.config_entries.async_update_subentry(
mock_config_entry,
next(iter(mock_config_entry.subentries.values())),
data={
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
CONF_CHAT_MODEL: "claude-sonnet-4-5",
CONF_WEB_SEARCH: True,
CONF_WEB_SEARCH_MAX_USES: 5,
CONF_WEB_SEARCH_USER_LOCATION: True,
CONF_WEB_SEARCH_CITY: "San Francisco",
CONF_WEB_SEARCH_REGION: "California",
CONF_WEB_SEARCH_COUNTRY: "US",
CONF_WEB_SEARCH_TIMEZONE: "America/Los_Angeles",
},
)
return mock_config_entry
@pytest.fixture
def mock_config_entry_with_no_structured_output(
hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> MockConfigEntry:
"""Mock a config entry with a model without structured outputs support."""
for subentry in mock_config_entry.subentries.values():
hass.config_entries.async_update_subentry(
mock_config_entry,
subentry,
data={
CONF_CHAT_MODEL: "claude-sonnet-4-0",
},
)
return mock_config_entry
@pytest.fixture
async def mock_init_component(
hass: HomeAssistant, mock_config_entry: MockConfigEntry

View File

@@ -0,0 +1,252 @@
# serializer version: 1
# name: test_generate_structured_data
dict({
'max_tokens': 3000,
'messages': list([
dict({
'content': 'Generate test data',
'role': 'user',
}),
dict({
'content': list([
dict({
'text': '{"characters": ["Mario", "Luigi"]}',
'type': 'text',
}),
]),
'role': 'assistant',
}),
]),
'model': 'claude-haiku-4-5',
'output_config': dict({
'format': dict({
'schema': dict({
'additionalProperties': False,
'properties': dict({
'characters': dict({
'items': dict({
'type': 'string',
}),
'type': 'array',
}),
}),
'required': list([
'characters',
]),
'type': 'object',
}),
'type': 'json_schema',
}),
}),
'stream': True,
'system': list([
dict({
'cache_control': dict({
'type': 'ephemeral',
}),
'text': '''
You are a Home Assistant expert and help users with their tasks.
Current time is 04:00:00. Today's date is 2026-01-01.
''',
'type': 'text',
}),
]),
'temperature': 1.0,
'thinking': dict({
'type': 'disabled',
}),
})
# ---
# name: test_generate_structured_data_legacy
dict({
'max_tokens': 3000,
'messages': list([
dict({
'content': 'Generate test data',
'role': 'user',
}),
dict({
'content': list([
dict({
'text': '{"characters": ["Mario", "Luigi"]}',
'type': 'text',
}),
]),
'role': 'assistant',
}),
]),
'model': 'claude-sonnet-4-0',
'stream': True,
'system': list([
dict({
'cache_control': dict({
'type': 'ephemeral',
}),
'text': '''
You are a Home Assistant expert and help users with their tasks.
Current time is 04:00:00. Today's date is 2026-01-01.
''',
'type': 'text',
}),
]),
'temperature': 1.0,
'thinking': dict({
'type': 'disabled',
}),
'tool_choice': dict({
'name': 'test_task',
'type': 'tool',
}),
'tools': list([
dict({
'description': 'Use this tool to reply to the user',
'input_schema': dict({
'properties': dict({
'characters': dict({
'items': dict({
'type': 'string',
}),
'type': 'array',
}),
}),
'required': list([
'characters',
]),
'type': 'object',
}),
'name': 'test_task',
}),
]),
})
# ---
# name: test_generate_structured_data_legacy_extended_thinking
dict({
'max_tokens': 3000,
'messages': list([
dict({
'content': 'Generate test data',
'role': 'user',
}),
dict({
'content': list([
dict({
'text': '{"characters": ["Mario", "Luigi"]}',
'type': 'text',
}),
]),
'role': 'assistant',
}),
]),
'model': 'claude-sonnet-4-0',
'stream': True,
'system': list([
dict({
'cache_control': dict({
'type': 'ephemeral',
}),
'text': '''
You are a Home Assistant expert and help users with their tasks.
Current time is 04:00:00. Today's date is 2026-01-01.
''',
'type': 'text',
}),
dict({
'text': "Claude MUST use the 'test_task' tool to provide the final answer instead of plain text.",
'type': 'text',
}),
]),
'thinking': dict({
'budget_tokens': 1500,
'type': 'enabled',
}),
'tool_choice': dict({
'type': 'auto',
}),
'tools': list([
dict({
'description': 'Use this tool to reply to the user',
'input_schema': dict({
'properties': dict({
'characters': dict({
'items': dict({
'type': 'string',
}),
'type': 'array',
}),
}),
'required': list([
'characters',
]),
'type': 'object',
}),
'name': 'test_task',
}),
]),
})
# ---
# name: test_generate_structured_data_legacy_tools
dict({
'max_tokens': 3000,
'messages': list([
dict({
'content': 'Generate test data',
'role': 'user',
}),
dict({
'content': list([
dict({
'text': '{"characters": ["Mario", "Luigi"]}',
'type': 'text',
}),
]),
'role': 'assistant',
}),
]),
'model': 'claude-sonnet-4-0',
'stream': True,
'system': list([
dict({
'cache_control': dict({
'type': 'ephemeral',
}),
'text': '''
You are a Home Assistant expert and help users with their tasks.
Current time is 04:00:00. Today's date is 2026-01-01.
''',
'type': 'text',
}),
]),
'temperature': 1.0,
'thinking': dict({
'type': 'disabled',
}),
'tool_choice': dict({
'type': 'any',
}),
'tools': list([
dict({
'max_uses': None,
'name': 'web_search',
'type': 'web_search_20250305',
}),
dict({
'description': 'Use this tool to reply to the user',
'input_schema': dict({
'properties': dict({
'characters': dict({
'items': dict({
'type': 'string',
}),
'type': 'array',
}),
}),
'required': list([
'characters',
]),
'type': 'object',
}),
'name': 'test_task',
}),
]),
})
# ---

View File

@@ -1,4 +1,118 @@
# serializer version: 1
# name: test_disabled_thinking
list([
dict({
'content': '''
You are a voice assistant for Home Assistant.
Answer questions about the world truthfully.
Answer in plain text. Keep it simple and to the point.
Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.
''',
'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
'role': 'system',
}),
dict({
'attachments': None,
'content': 'hello',
'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
'role': 'user',
}),
dict({
'agent_id': 'conversation.claude_conversation',
'content': 'Hello, how can I help you today?',
'created': HAFakeDatetime(2024, 5, 24, 12, 0, tzinfo=datetime.timezone.utc),
'native': None,
'role': 'assistant',
'thinking_content': None,
'tool_calls': None,
}),
])
# ---
# name: test_disabled_thinking.1
dict({
'max_tokens': 3000,
'messages': list([
dict({
'content': 'hello',
'role': 'user',
}),
dict({
'content': list([
dict({
'text': 'Hello, how can I help you today?',
'type': 'text',
}),
]),
'role': 'assistant',
}),
]),
'model': 'claude-opus-4-6',
'stream': True,
'system': list([
dict({
'cache_control': dict({
'type': 'ephemeral',
}),
'text': '''
You are a voice assistant for Home Assistant.
Answer questions about the world truthfully.
Answer in plain text. Keep it simple and to the point.
Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.
''',
'type': 'text',
}),
]),
'temperature': 1.0,
'thinking': dict({
'type': 'disabled',
}),
})
# ---
# name: test_extended_thinking
dict({
'max_tokens': 3000,
'messages': list([
dict({
'content': 'hello',
'role': 'user',
}),
dict({
'content': list([
dict({
'signature': 'ErUBCkYIARgCIkCYXaVNJShe3A86Hp7XUzh9YsCYBbJTbQsrklTAPtJ2sP/NoB6tSzpK/nTL6CjSo2R6n0KNBIg5MH6asM2R/kmaEgyB/X1FtZq5OQAC7jUaDEPWCdcwGQ4RaBy5wiIwmRxExIlDhoY6tILoVPnOExkC/0igZxHEwxK8RU/fmw0b+o+TwAarzUitwzbo21E5Kh3pa3I6yqVROf1t2F8rFocNUeCegsWV/ytwYV+ayA==',
'thinking': 'The user has just greeted me with "Hi". This is a simple greeting and doesn\'t require any Home Assistant function calls. I should respond with a friendly greeting and let them know I\'m available to help with their smart home.',
'type': 'thinking',
}),
dict({
'text': 'Hello, how can I help you today?',
'type': 'text',
}),
]),
'role': 'assistant',
}),
]),
'model': 'claude-3-7-sonnet-latest',
'stream': True,
'system': list([
dict({
'cache_control': dict({
'type': 'ephemeral',
}),
'text': '''
You are a voice assistant for Home Assistant.
Answer questions about the world truthfully.
Answer in plain text. Keep it simple and to the point.
Only if the user wants to control a device, tell them to expose entities to their voice assistant in Home Assistant.
''',
'type': 'text',
}),
]),
'thinking': dict({
'budget_tokens': 1500,
'type': 'enabled',
}),
})
# ---
# name: test_extended_thinking_tool_call
list([
dict({

View File

@@ -3,10 +3,13 @@
from pathlib import Path
from unittest.mock import AsyncMock, patch
from freezegun import freeze_time
import pytest
from syrupy.assertion import SnapshotAssertion
import voluptuous as vol
from homeassistant.components import ai_task, media_source
from homeassistant.components.anthropic.const import CONF_CHAT_MODEL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er, selector
@@ -51,13 +54,45 @@ async def test_generate_data(
assert result.data == "The test data"
async def test_generate_structured_data_legacy(
async def test_empty_data(
hass: HomeAssistant,
mock_config_entry_with_no_structured_output: MockConfigEntry,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
entity_registry: er.EntityRegistry,
) -> None:
"""Test AI Task data generation but the data returned is empty."""
mock_create_stream.return_value = [create_content_block(0, [""])]
with pytest.raises(
HomeAssistantError, match="Last content in chat log is not an AssistantContent"
):
await ai_task.async_generate_data(
hass,
task_name="Test Task",
entity_id="ai_task.claude_ai_task",
instructions="Generate test data",
)
@freeze_time("2026-01-01 12:00:00")
async def test_generate_structured_data_legacy(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
snapshot: SnapshotAssertion,
) -> None:
"""Test AI Task structured data generation with legacy method."""
for subentry in mock_config_entry.subentries.values():
hass.config_entries.async_update_subentry(
mock_config_entry,
subentry,
data={
CONF_CHAT_MODEL: "claude-sonnet-4-0",
},
)
mock_create_stream.return_value = [
create_tool_use_block(
1,
@@ -86,15 +121,122 @@ async def test_generate_structured_data_legacy(
)
assert result.data == {"characters": ["Mario", "Luigi"]}
assert mock_create_stream.call_args.kwargs.copy() == snapshot
@freeze_time("2026-01-01 12:00:00")
async def test_generate_structured_data_legacy_tools(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
snapshot: SnapshotAssertion,
) -> None:
"""Test AI Task structured data generation with legacy method and tools enabled."""
mock_create_stream.return_value = [
create_tool_use_block(
1,
"toolu_0123456789AbCdEfGhIjKlM",
"test_task",
['{"charac', 'ters": ["Mario', '", "Luigi"]}'],
),
]
for subentry in mock_config_entry.subentries.values():
hass.config_entries.async_update_subentry(
mock_config_entry,
subentry,
data={"chat_model": "claude-sonnet-4-0", "web_search": True},
)
result = await ai_task.async_generate_data(
hass,
task_name="Test Task",
entity_id="ai_task.claude_ai_task",
instructions="Generate test data",
structure=vol.Schema(
{
vol.Required("characters"): selector.selector(
{
"text": {
"multiple": True,
}
}
)
},
),
)
assert result.data == {"characters": ["Mario", "Luigi"]}
assert mock_create_stream.call_args.kwargs.copy() == snapshot
@freeze_time("2026-01-01 12:00:00")
async def test_generate_structured_data_legacy_extended_thinking(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
snapshot: SnapshotAssertion,
) -> None:
"""Test AI Task structured data generation with legacy method and extended_thinking."""
mock_create_stream.return_value = [
create_tool_use_block(
1,
"toolu_0123456789AbCdEfGhIjKlM",
"test_task",
['{"charac', 'ters": ["Mario', '", "Luigi"]}'],
),
]
for subentry in mock_config_entry.subentries.values():
hass.config_entries.async_update_subentry(
mock_config_entry,
subentry,
data={
"chat_model": "claude-sonnet-4-0",
"thinking_budget": 1500,
},
)
result = await ai_task.async_generate_data(
hass,
task_name="Test Task",
entity_id="ai_task.claude_ai_task",
instructions="Generate test data",
structure=vol.Schema(
{
vol.Required("characters"): selector.selector(
{
"text": {
"multiple": True,
}
}
)
},
),
)
assert result.data == {"characters": ["Mario", "Luigi"]}
assert mock_create_stream.call_args.kwargs.copy() == snapshot
async def test_generate_invalid_structured_data_legacy(
hass: HomeAssistant,
mock_config_entry_with_no_structured_output: MockConfigEntry,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
) -> None:
"""Test AI Task with invalid JSON response with legacy method."""
for subentry in mock_config_entry.subentries.values():
hass.config_entries.async_update_subentry(
mock_config_entry,
subentry,
data={
CONF_CHAT_MODEL: "claude-sonnet-4-0",
},
)
mock_create_stream.return_value = [
create_tool_use_block(
1,
@@ -126,11 +268,13 @@ async def test_generate_invalid_structured_data_legacy(
)
@freeze_time("2026-01-01 12:00:00")
async def test_generate_structured_data(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
snapshot: SnapshotAssertion,
) -> None:
"""Test AI Task structured data generation."""
mock_create_stream.return_value = [
@@ -156,6 +300,7 @@ async def test_generate_structured_data(
)
assert result.data == {"characters": ["Mario", "Luigi"]}
assert mock_create_stream.call_args.kwargs.copy() == snapshot
async def test_generate_data_with_attachments(
@@ -177,7 +322,7 @@ async def test_generate_data_with_attachments(
side_effect=[
media_source.PlayMedia(
url="http://example.com/doorbell_snapshot.jpg",
mime_type="image/jpeg",
mime_type="image/jpg",
path=Path("doorbell_snapshot.jpg"),
),
media_source.PlayMedia(
@@ -188,10 +333,6 @@ async def test_generate_data_with_attachments(
],
),
patch("pathlib.Path.exists", return_value=True),
patch(
"homeassistant.components.openai_conversation.entity.guess_file_type",
return_value=("image/jpeg", None),
),
patch("pathlib.Path.read_bytes", return_value=b"fake_image_data"),
):
result = await ai_task.async_generate_data(
@@ -242,3 +383,75 @@ async def test_generate_data_with_attachments(
assert document_block["source"]["data"] == "ZmFrZV9pbWFnZV9kYXRh"
assert document_block["source"]["media_type"] == "application/pdf"
assert document_block["source"]["type"] == "base64"
async def test_generate_data_invalid_attachments(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
entity_registry: er.EntityRegistry,
) -> None:
"""Test AI Task data generation with attachments of unsupported type."""
entity_id = "ai_task.claude_ai_task"
mock_create_stream.return_value = [create_content_block(0, ["Hi there!"])]
# Test path that doesn't exist
with (
patch(
"homeassistant.components.media_source.async_resolve_media",
side_effect=[
media_source.PlayMedia(
url="http://example.com/doorbell_snapshot.jpg",
mime_type="image/jpeg",
path=Path("doorbell_snapshot.jpg"),
)
],
),
patch("pathlib.Path.exists", return_value=False),
pytest.raises(
HomeAssistantError, match="`doorbell_snapshot.jpg` does not exist"
),
):
await ai_task.async_generate_data(
hass,
task_name="Test Task",
entity_id=entity_id,
instructions="Test prompt",
attachments=[
{"media_content_id": "media-source://media/doorbell_snapshot.jpg"},
],
)
# Test unsupported file type
with (
patch(
"homeassistant.components.media_source.async_resolve_media",
side_effect=[
media_source.PlayMedia(
url="http://example.com/doorbell_snapshot.txt",
mime_type=None,
path=Path("doorbell_snapshot.txt"),
)
],
),
patch("pathlib.Path.exists", return_value=True),
patch(
"homeassistant.components.anthropic.entity.guess_file_type",
return_value=("text/plain", None),
),
pytest.raises(
HomeAssistantError,
match="Only images and PDF are supported by the Anthropic API",
),
):
await ai_task.async_generate_data(
hass,
task_name="Test Task",
entity_id=entity_id,
instructions="Test prompt",
attachments=[
{"media_content_id": "media-source://media/doorbell_snapshot.txt"},
],
)

View File

@@ -780,6 +780,44 @@ async def test_creating_ai_task_subentry_advanced(
}
async def test_reauth(hass: HomeAssistant) -> None:
"""Test we can reauthenticate."""
# Pretend we already set up a config entry.
hass.config.components.add("anthropic")
mock_config_entry = MockConfigEntry(
domain=DOMAIN,
state=config_entries.ConfigEntryState.LOADED,
)
mock_config_entry.add_to_hass(hass)
result = await mock_config_entry.start_reauth_flow(hass)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "reauth_confirm"
with (
patch(
"homeassistant.components.anthropic.config_flow.anthropic.resources.models.AsyncModels.list",
new_callable=AsyncMock,
),
patch(
"homeassistant.components.anthropic.async_setup_entry",
return_value=True,
),
):
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_API_KEY: "new_api_key",
},
)
await hass.async_block_till_done()
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "reauth_successful"
assert mock_config_entry.data[CONF_API_KEY] == "new_api_key"
@pytest.mark.parametrize(
("current_llm_apis", "suggested_llm_apis", "expected_options"),
[

View File

@@ -18,6 +18,18 @@ from syrupy.assertion import SnapshotAssertion
import voluptuous as vol
from homeassistant.components import conversation
from homeassistant.components.anthropic.const import (
CONF_CHAT_MODEL,
CONF_THINKING_BUDGET,
CONF_THINKING_EFFORT,
CONF_WEB_SEARCH,
CONF_WEB_SEARCH_CITY,
CONF_WEB_SEARCH_COUNTRY,
CONF_WEB_SEARCH_MAX_USES,
CONF_WEB_SEARCH_REGION,
CONF_WEB_SEARCH_TIMEZONE,
CONF_WEB_SEARCH_USER_LOCATION,
)
from homeassistant.components.anthropic.entity import CitationDetails, ContentDetails
from homeassistant.const import CONF_LLM_HASS_API
from homeassistant.core import Context, HomeAssistant
@@ -517,11 +529,22 @@ async def test_refusal(
async def test_extended_thinking(
hass: HomeAssistant,
mock_config_entry_with_extended_thinking: MockConfigEntry,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
snapshot: SnapshotAssertion,
) -> None:
"""Test extended thinking support."""
hass.config_entries.async_update_subentry(
mock_config_entry,
next(iter(mock_config_entry.subentries.values())),
data={
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
CONF_CHAT_MODEL: "claude-3-7-sonnet-latest",
CONF_THINKING_BUDGET: 1500,
},
)
mock_create_stream.return_value = [
(
*create_thinking_block(
@@ -550,12 +573,52 @@ async def test_extended_thinking(
assert len(chat_log.content) == 3
assert chat_log.content[1].content == "hello"
assert chat_log.content[2].content == "Hello, how can I help you today?"
call_args = mock_create_stream.call_args.kwargs.copy()
call_args.pop("tools", None)
assert call_args == snapshot
@freeze_time("2024-05-24 12:00:00")
async def test_disabled_thinking(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
snapshot: SnapshotAssertion,
) -> None:
"""Test conversation with thinking effort disabled."""
hass.config_entries.async_update_subentry(
mock_config_entry,
next(iter(mock_config_entry.subentries.values())),
data={
CONF_LLM_HASS_API: "assist",
CONF_CHAT_MODEL: "claude-opus-4-6",
CONF_THINKING_EFFORT: "none",
},
)
mock_create_stream.return_value = [
create_content_block(1, ["Hello, how can I help you today?"])
]
result = await conversation.async_converse(
hass, "hello", None, Context(), agent_id="conversation.claude_conversation"
)
chat_log = hass.data.get(conversation.chat_log.DATA_CHAT_LOGS).get(
result.conversation_id
)
assert len(chat_log.content) == 3
assert chat_log.content == snapshot
call_args = mock_create_stream.call_args.kwargs.copy()
call_args.pop("tools", None)
assert call_args == snapshot
@freeze_time("2024-05-24 12:00:00")
async def test_redacted_thinking(
hass: HomeAssistant,
mock_config_entry_with_extended_thinking: MockConfigEntry,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
snapshot: SnapshotAssertion,
@@ -590,12 +653,22 @@ async def test_redacted_thinking(
async def test_extended_thinking_tool_call(
mock_get_tools,
hass: HomeAssistant,
mock_config_entry_with_adaptive_thinking: MockConfigEntry,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
snapshot: SnapshotAssertion,
) -> None:
"""Test that thinking blocks and their order are preserved in with tool calls."""
hass.config_entries.async_update_subentry(
mock_config_entry,
next(iter(mock_config_entry.subentries.values())),
data={
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
CONF_CHAT_MODEL: "claude-opus-4-6",
CONF_THINKING_EFFORT: "medium",
},
)
agent_id = "conversation.claude_conversation"
context = Context()
@@ -658,12 +731,28 @@ async def test_extended_thinking_tool_call(
@freeze_time("2025-10-31 12:00:00")
async def test_web_search(
hass: HomeAssistant,
mock_config_entry_with_web_search: MockConfigEntry,
mock_config_entry: MockConfigEntry,
mock_init_component,
mock_create_stream: AsyncMock,
snapshot: SnapshotAssertion,
) -> None:
"""Test web search."""
hass.config_entries.async_update_subentry(
mock_config_entry,
next(iter(mock_config_entry.subentries.values())),
data={
CONF_LLM_HASS_API: llm.LLM_API_ASSIST,
CONF_CHAT_MODEL: "claude-sonnet-4-5",
CONF_WEB_SEARCH: True,
CONF_WEB_SEARCH_MAX_USES: 5,
CONF_WEB_SEARCH_USER_LOCATION: True,
CONF_WEB_SEARCH_CITY: "San Francisco",
CONF_WEB_SEARCH_REGION: "California",
CONF_WEB_SEARCH_COUNTRY: "US",
CONF_WEB_SEARCH_TIMEZONE: "America/Los_Angeles",
},
)
web_search_results = [
WebSearchResultBlock(
type="web_search_result",

View File

@@ -9,14 +9,19 @@ from anthropic import (
AuthenticationError,
BadRequestError,
)
import httpx
from httpx import URL, Request, Response
import pytest
from homeassistant.components.anthropic.const import DOMAIN
from homeassistant.config_entries import ConfigEntryDisabler, ConfigSubentryData
from homeassistant.const import CONF_API_KEY
from homeassistant.components.anthropic.const import DATA_REPAIR_DEFER_RELOAD, DOMAIN
from homeassistant.config_entries import (
ConfigEntryDisabler,
ConfigEntryState,
ConfigSubentryData,
)
from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers import device_registry as dr, entity_registry as er, llm
from homeassistant.helpers.device_registry import DeviceEntryDisabler
from homeassistant.helpers.entity_registry import RegistryEntryDisabler
from homeassistant.setup import async_setup_component
@@ -40,17 +45,6 @@ from tests.common import MockConfigEntry
),
"anthropic integration not ready yet: Your credit balance is too low to access the Claude API",
),
(
AuthenticationError(
message="invalid x-api-key",
response=Response(
status_code=401,
request=Request(method="POST", url=URL()),
),
body={"type": "error", "error": {"type": "authentication_error"}},
),
"Invalid API key",
),
],
)
async def test_init_error(
@@ -70,6 +64,133 @@ async def test_init_error(
assert error in caplog.text
async def test_init_auth_error(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test auth error during init errors."""
with patch(
"anthropic.resources.models.AsyncModels.list",
side_effect=AuthenticationError(
response=httpx.Response(
status_code=500, request=httpx.Request(method="GET", url="test")
),
body=None,
message="",
),
):
assert await async_setup_component(hass, "anthropic", {})
await hass.async_block_till_done()
assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR
async def test_deferred_update(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_init_component,
) -> None:
"""Test that update is deferred."""
for subentry in mock_config_entry.subentries.values():
if subentry.subentry_type == "conversation":
conversation_subentry = subentry
elif subentry.subentry_type == "ai_task_data":
ai_task_subentry = subentry
old_client = mock_config_entry.runtime_data
# Set deferred update
defer_reload_entries: set[str] = hass.data.setdefault(DOMAIN, {}).setdefault(
DATA_REPAIR_DEFER_RELOAD, set()
)
defer_reload_entries.add(mock_config_entry.entry_id)
# Update the conversation subentry
hass.config_entries.async_update_subentry(
mock_config_entry,
conversation_subentry,
data={CONF_LLM_HASS_API: llm.LLM_API_ASSIST},
)
await hass.async_block_till_done()
# Verify that the entry is not reloaded yet
assert mock_config_entry.runtime_data is old_client
# Clear deferred update
defer_reload_entries.discard(mock_config_entry.entry_id)
# Update the AI Task subentry
hass.config_entries.async_update_subentry(
mock_config_entry,
ai_task_subentry,
data={CONF_LLM_HASS_API: llm.LLM_API_ASSIST},
)
await hass.async_block_till_done()
# Verify that the entry is reloaded
assert mock_config_entry.runtime_data is not old_client
async def test_downgrade_from_v3_to_v2(
hass: HomeAssistant,
device_registry: dr.DeviceRegistry,
entity_registry: er.EntityRegistry,
) -> None:
"""Test that migration from future versions is skipped."""
# Create a v3 config entry
mock_config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_API_KEY: "test-api-key"},
version=3,
minor_version=0,
subentries_data=[
{
"data": {
"recommended": True,
"llm_hass_api": ["assist"],
"prompt": "You are a helpful assistant",
"chat_model": "claude-3-haiku-20240307",
},
"subentry_id": "mock_id",
"subentry_type": "conversation",
"title": "Claude haiku",
"unique_id": None,
},
],
)
mock_config_entry.add_to_hass(hass)
conversation_device = device_registry.async_get_or_create(
config_entry_id=mock_config_entry.entry_id,
config_subentry_id="mock_id",
identifiers={(DOMAIN, mock_config_entry.entry_id)},
name=mock_config_entry.title,
manufacturer="Anthropic",
model="Claude",
entry_type=dr.DeviceEntryType.SERVICE,
)
entity_registry.async_get_or_create(
"conversation",
DOMAIN,
mock_config_entry.entry_id,
config_entry=mock_config_entry,
config_subentry_id="mock_id",
device_id=conversation_device.id,
suggested_object_id="claude",
)
# Run migration
with patch(
"homeassistant.components.anthropic.async_setup_entry",
return_value=True,
):
await hass.config_entries.async_setup(mock_config_entry.entry_id)
await hass.async_block_till_done()
# Verify migration was skipped and version was not updated
assert mock_config_entry.version == 3
assert mock_config_entry.minor_version == 0
async def test_migration_from_v1_to_v2(
hass: HomeAssistant,
device_registry: dr.DeviceRegistry,

View File

@@ -20,17 +20,17 @@
'labels': set({
}),
'name': None,
'object_id_base': 'Line Crossing',
'object_id_base': 'Line crossing',
'options': dict({
}),
'original_device_class': <BinarySensorDeviceClass.MOTION: 'motion'>,
'original_icon': None,
'original_name': 'Line Crossing',
'original_name': 'Line crossing',
'platform': 'hikvision',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'translation_key': 'line_crossing',
'unique_id': 'DS-2CD2142FWD-I20170101AAAA_Line Crossing_1',
'unit_of_measurement': None,
})
@@ -39,7 +39,7 @@
StateSnapshot({
'attributes': ReadOnlyDict({
'device_class': 'motion',
'friendly_name': 'Front Camera Line Crossing',
'friendly_name': 'Front Camera Line crossing',
'last_tripped_time': '2024-01-01T00:00:00Z',
}),
'context': <ANY>,

View File

@@ -1,5 +1,6 @@
"""Test Hikvision binary sensors."""
import logging
from unittest.mock import MagicMock
import pytest
@@ -115,22 +116,28 @@ async def test_binary_sensor_no_sensors(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_hikcamera: MagicMock,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test setup when device has no sensors."""
mock_hikcamera.return_value.current_event_states = None
await setup_integration(hass, mock_config_entry)
with caplog.at_level(logging.WARNING):
await setup_integration(hass, mock_config_entry)
# No binary sensors should be created
states = hass.states.async_entity_ids("binary_sensor")
assert len(states) == 0
# Verify warning was logged
assert "has no sensors available" in caplog.text
@pytest.mark.parametrize("amount_of_channels", [2])
async def test_binary_sensor_nvr_device(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_hikcamera: MagicMock,
device_registry: dr.DeviceRegistry,
) -> None:
"""Test binary sensor naming for NVR devices."""
mock_hikcamera.return_value.get_type = "NVR"
@@ -140,12 +147,22 @@ async def test_binary_sensor_nvr_device(
await setup_integration(hass, mock_config_entry)
# NVR sensors are on per-channel devices
state = hass.states.get("binary_sensor.front_camera_channel_1_motion")
assert state is not None
# Verify NVR channel devices are created with via_device linking
channel_1_device = device_registry.async_get_device(
identifiers={(DOMAIN, f"{TEST_DEVICE_ID}_1")}
)
assert channel_1_device is not None
assert channel_1_device.via_device_id is not None
state = hass.states.get("binary_sensor.front_camera_channel_2_motion")
assert state is not None
channel_2_device = device_registry.async_get_device(
identifiers={(DOMAIN, f"{TEST_DEVICE_ID}_2")}
)
assert channel_2_device is not None
assert channel_2_device.via_device_id is not None
# Verify sensors are created (entity IDs depend on translation loading)
states = hass.states.async_entity_ids("binary_sensor")
assert len(states) == 2
async def test_binary_sensor_state_on(
@@ -172,17 +189,22 @@ async def test_binary_sensor_device_class_unknown(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_hikcamera: MagicMock,
caplog: pytest.LogCaptureFixture,
) -> None:
"""Test binary sensor with unknown device class."""
"""Test unknown sensor types are logged and skipped."""
mock_hikcamera.return_value.current_event_states = {
"Unknown Event": [(False, 1)],
}
await setup_integration(hass, mock_config_entry)
with caplog.at_level(logging.WARNING):
await setup_integration(hass, mock_config_entry)
state = hass.states.get("binary_sensor.front_camera_unknown_event")
assert state is not None
assert state.attributes.get(ATTR_DEVICE_CLASS) is None
# No entity should be created for unknown sensor types
states = hass.states.async_entity_ids("binary_sensor")
assert len(states) == 0
# Verify warning was logged for unknown sensor type
assert "Unknown Hikvision sensor type 'Unknown Event'" in caplog.text
async def test_yaml_import_creates_deprecation_issue(

View File

@@ -2,19 +2,31 @@
from __future__ import annotations
from ipaddress import IPv4Address
from unittest.mock import AsyncMock, MagicMock
from homevolt import HomevoltAuthenticationError, HomevoltConnectionError
import pytest
from homeassistant.components.homevolt.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER
from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF
from homeassistant.const import CONF_HOST, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from tests.common import MockConfigEntry
DISCOVERY_INFO = ZeroconfServiceInfo(
ip_address=IPv4Address("192.168.1.123"),
ip_addresses=[IPv4Address("192.168.1.123")],
port=80,
hostname="homevolt.local.",
type="_http._tcp.local.",
name="homevolt._http._tcp.local.",
properties={},
)
async def test_full_flow_success(
hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_homevolt_client: MagicMock
@@ -305,3 +317,157 @@ async def test_reauth_flow_errors(
CONF_HOST: "127.0.0.1",
CONF_PASSWORD: "correct-password",
}
async def test_zeroconf_confirm_flow_success(
hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_homevolt_client: MagicMock
) -> None:
"""Test zeroconf flow shows confirm step before creating entry."""
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=DISCOVERY_INFO,
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "zeroconf_confirm"
assert result["description_placeholders"] == {"host": "192.168.1.123"}
result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Homevolt"
assert result["data"] == {CONF_HOST: "192.168.1.123", CONF_PASSWORD: None}
assert result["result"].unique_id == "40580137858664"
assert len(mock_setup_entry.mock_calls) == 1
async def test_zeroconf_duplicate_aborts(
hass: HomeAssistant,
mock_setup_entry: AsyncMock,
mock_homevolt_client: MagicMock,
mock_config_entry: MockConfigEntry,
) -> None:
"""Test zeroconf flow aborts when unique id is already configured."""
mock_config_entry.add_to_hass(hass)
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=DISCOVERY_INFO,
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == "already_configured"
assert mock_config_entry.data[CONF_HOST] == "192.168.1.123"
async def test_zeroconf_confirm_with_password_success(
hass: HomeAssistant,
mock_setup_entry: AsyncMock,
mock_homevolt_client: MagicMock,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test zeroconf confirm collects password and creates entry when auth is required."""
mock_homevolt_client.update_info.side_effect = HomevoltAuthenticationError
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=DISCOVERY_INFO,
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "zeroconf_confirm"
assert result["description_placeholders"] == {"host": "192.168.1.123"}
mock_homevolt_client.update_info.side_effect = None
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PASSWORD: "test-password"},
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Homevolt"
assert result["data"] == {
CONF_HOST: "192.168.1.123",
CONF_PASSWORD: "test-password",
}
assert result["result"].unique_id == "40580137858664"
assert len(mock_setup_entry.mock_calls) == 1
async def test_zeroconf_confirm_with_password_invalid_then_success(
hass: HomeAssistant,
mock_setup_entry: AsyncMock,
mock_homevolt_client: MagicMock,
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Test zeroconf confirm shows error on invalid password, then succeeds."""
mock_homevolt_client.update_info.side_effect = HomevoltAuthenticationError
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=DISCOVERY_INFO,
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "zeroconf_confirm"
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PASSWORD: "wrong-password"},
)
assert result["type"] is FlowResultType.FORM
assert result["step_id"] == "zeroconf_confirm"
assert result["errors"] == {"base": "invalid_auth"}
mock_homevolt_client.update_info.side_effect = None
result = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_PASSWORD: "correct-password"},
)
assert result["type"] is FlowResultType.CREATE_ENTRY
assert result["title"] == "Homevolt"
assert result["data"] == {
CONF_HOST: "192.168.1.123",
CONF_PASSWORD: "correct-password",
}
assert result["result"].unique_id == "40580137858664"
assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
("exception", "expected_reason"),
[
(HomevoltConnectionError, "cannot_connect"),
(Exception("Unexpected error"), "unknown"),
],
ids=["connection_error", "unknown_error"],
)
async def test_zeroconf_error_aborts(
hass: HomeAssistant,
mock_setup_entry: AsyncMock,
mock_homevolt_client: MagicMock,
exception: Exception,
expected_reason: str,
) -> None:
"""Test zeroconf flow aborts on error during discovery."""
mock_homevolt_client.update_info.side_effect = exception
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": SOURCE_ZEROCONF},
data=DISCOVERY_INFO,
)
assert result["type"] is FlowResultType.ABORT
assert result["reason"] == expected_reason

View File

@@ -1,6 +1,7 @@
"""Common fixtures for the liebherr tests."""
from collections.abc import Generator
import copy
from unittest.mock import AsyncMock, MagicMock, patch
from pyliebherrhomeapi import (
@@ -9,6 +10,7 @@ from pyliebherrhomeapi import (
DeviceType,
TemperatureControl,
TemperatureUnit,
ToggleControl,
ZonePosition,
)
import pytest
@@ -52,6 +54,34 @@ MOCK_DEVICE_STATE = DeviceState(
max=-16,
unit=TemperatureUnit.CELSIUS,
),
ToggleControl(
name="supercool",
type="ToggleControl",
zone_id=1,
zone_position=ZonePosition.TOP,
value=False,
),
ToggleControl(
name="superfrost",
type="ToggleControl",
zone_id=2,
zone_position=ZonePosition.BOTTOM,
value=True,
),
ToggleControl(
name="partymode",
type="ToggleControl",
zone_id=None,
zone_position=None,
value=False,
),
ToggleControl(
name="nightmode",
type="ToggleControl",
zone_id=None,
zone_position=None,
value=True,
),
],
)
@@ -90,8 +120,15 @@ def mock_liebherr_client() -> Generator[MagicMock]:
):
client = mock_client.return_value
client.get_devices.return_value = [MOCK_DEVICE]
client.get_device_state.return_value = MOCK_DEVICE_STATE
# Return a fresh copy each call so mutations don't leak between calls.
client.get_device_state.side_effect = lambda *a, **kw: copy.deepcopy(
MOCK_DEVICE_STATE
)
client.set_temperature = AsyncMock()
client.set_supercool = AsyncMock()
client.set_superfrost = AsyncMock()
client.set_party_mode = AsyncMock()
client.set_night_mode = AsyncMock()
yield client

View File

@@ -32,6 +32,34 @@
'zone_id': 2,
'zone_position': 'bottom',
}),
dict({
'name': 'supercool',
'type': 'ToggleControl',
'value': False,
'zone_id': 1,
'zone_position': 'top',
}),
dict({
'name': 'superfrost',
'type': 'ToggleControl',
'value': True,
'zone_id': 2,
'zone_position': 'bottom',
}),
dict({
'name': 'partymode',
'type': 'ToggleControl',
'value': False,
'zone_id': None,
'zone_position': None,
}),
dict({
'name': 'nightmode',
'type': 'ToggleControl',
'value': True,
'zone_id': None,
'zone_position': None,
}),
]),
'device': dict({
'device_id': 'test_device_id',

View File

@@ -0,0 +1,246 @@
# serializer version: 1
# name: test_single_zone_switch[switch.single_zone_fridge_supercool-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.single_zone_fridge_supercool',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'SuperCool',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'SuperCool',
'platform': 'liebherr',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'supercool',
'unique_id': 'single_zone_id_supercool_1',
'unit_of_measurement': None,
})
# ---
# name: test_single_zone_switch[switch.single_zone_fridge_supercool-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Single Zone Fridge SuperCool',
}),
'context': <ANY>,
'entity_id': 'switch.single_zone_fridge_supercool',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_switches[switch.test_fridge_bottom_zone_superfrost-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.test_fridge_bottom_zone_superfrost',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Bottom zone SuperFrost',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Bottom zone SuperFrost',
'platform': 'liebherr',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'superfrost_bottom_zone',
'unique_id': 'test_device_id_superfrost_2',
'unit_of_measurement': None,
})
# ---
# name: test_switches[switch.test_fridge_bottom_zone_superfrost-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test Fridge Bottom zone SuperFrost',
}),
'context': <ANY>,
'entity_id': 'switch.test_fridge_bottom_zone_superfrost',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_switches[switch.test_fridge_night_mode-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.test_fridge_night_mode',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Night mode',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Night mode',
'platform': 'liebherr',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'night_mode',
'unique_id': 'test_device_id_night_mode',
'unit_of_measurement': None,
})
# ---
# name: test_switches[switch.test_fridge_night_mode-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test Fridge Night mode',
}),
'context': <ANY>,
'entity_id': 'switch.test_fridge_night_mode',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'on',
})
# ---
# name: test_switches[switch.test_fridge_party_mode-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.test_fridge_party_mode',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Party mode',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Party mode',
'platform': 'liebherr',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'party_mode',
'unique_id': 'test_device_id_party_mode',
'unit_of_measurement': None,
})
# ---
# name: test_switches[switch.test_fridge_party_mode-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test Fridge Party mode',
}),
'context': <ANY>,
'entity_id': 'switch.test_fridge_party_mode',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---
# name: test_switches[switch.test_fridge_top_zone_supercool-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.test_fridge_top_zone_supercool',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Top zone SuperCool',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Top zone SuperCool',
'platform': 'liebherr',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': 'supercool_top_zone',
'unique_id': 'test_device_id_supercool_1',
'unit_of_measurement': None,
})
# ---
# name: test_switches[switch.test_fridge_top_zone_supercool-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test Fridge Top zone SuperCool',
}),
'context': <ANY>,
'entity_id': 'switch.test_fridge_top_zone_supercool',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---

View File

@@ -1,5 +1,6 @@
"""Test the Liebherr number platform."""
import copy
from datetime import timedelta
from unittest.mock import MagicMock, patch
@@ -28,7 +29,7 @@ from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from .conftest import MOCK_DEVICE
from .conftest import MOCK_DEVICE, MOCK_DEVICE_STATE
from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
@@ -71,7 +72,7 @@ async def test_single_zone_number(
device_name="K2601",
)
mock_liebherr_client.get_devices.return_value = [device]
mock_liebherr_client.get_device_state.return_value = DeviceState(
single_zone_state = DeviceState(
device=device,
controls=[
TemperatureControl(
@@ -87,6 +88,9 @@ async def test_single_zone_number(
)
],
)
mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: copy.deepcopy(
single_zone_state
)
mock_config_entry.add_to_hass(hass)
with patch("homeassistant.components.liebherr.PLATFORMS", platforms):
@@ -111,7 +115,7 @@ async def test_multi_zone_with_none_position(
device_name="CBNes9999",
)
mock_liebherr_client.get_devices.return_value = [device]
mock_liebherr_client.get_device_state.return_value = DeviceState(
multi_zone_state = DeviceState(
device=device,
controls=[
TemperatureControl(
@@ -138,6 +142,9 @@ async def test_multi_zone_with_none_position(
),
],
)
mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: copy.deepcopy(
multi_zone_state
)
mock_config_entry.add_to_hass(hass)
with patch("homeassistant.components.liebherr.PLATFORMS", platforms):
@@ -192,7 +199,10 @@ async def test_set_temperature_failure(
"Connection failed"
)
with pytest.raises(HomeAssistantError, match="Failed to set temperature"):
with pytest.raises(
HomeAssistantError,
match="An error occurred while communicating with the device: Connection failed",
):
await hass.services.async_call(
NUMBER_DOMAIN,
SERVICE_SET_VALUE,
@@ -231,7 +241,9 @@ async def test_number_update_failure(
assert state.state == STATE_UNAVAILABLE
# Simulate recovery
mock_liebherr_client.get_device_state.side_effect = None
mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: copy.deepcopy(
MOCK_DEVICE_STATE
)
freezer.tick(timedelta(seconds=61))
async_fire_time_changed(hass)
@@ -261,7 +273,7 @@ async def test_number_when_control_missing(
assert state.attributes["unit_of_measurement"] == "°C"
# Device stops reporting controls
mock_liebherr_client.get_device_state.return_value = DeviceState(
mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: DeviceState(
device=MOCK_DEVICE, controls=[]
)
@@ -290,7 +302,7 @@ async def test_number_with_none_min_max(
device_name="K2601",
)
mock_liebherr_client.get_devices.return_value = [device]
mock_liebherr_client.get_device_state.return_value = DeviceState(
none_min_max_state = DeviceState(
device=device,
controls=[
TemperatureControl(
@@ -306,6 +318,9 @@ async def test_number_with_none_min_max(
)
],
)
mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: copy.deepcopy(
none_min_max_state
)
mock_config_entry.add_to_hass(hass)
with patch("homeassistant.components.liebherr.PLATFORMS", platforms):

View File

@@ -1,5 +1,6 @@
"""Test the Liebherr sensor platform."""
import copy
from datetime import timedelta
from unittest.mock import MagicMock, patch
@@ -26,7 +27,7 @@ from homeassistant.const import STATE_UNAVAILABLE, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from .conftest import MOCK_DEVICE
from .conftest import MOCK_DEVICE, MOCK_DEVICE_STATE
from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
@@ -59,7 +60,7 @@ async def test_single_zone_sensor(
device_name="K2601",
)
mock_liebherr_client.get_devices.return_value = [device]
mock_liebherr_client.get_device_state.return_value = DeviceState(
single_zone_state = DeviceState(
device=device,
controls=[
TemperatureControl(
@@ -72,6 +73,9 @@ async def test_single_zone_sensor(
)
],
)
mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: copy.deepcopy(
single_zone_state
)
mock_config_entry.add_to_hass(hass)
with patch("homeassistant.components.liebherr.PLATFORMS", platforms):
@@ -96,7 +100,7 @@ async def test_multi_zone_with_none_position(
device_name="CBNes9999",
)
mock_liebherr_client.get_devices.return_value = [device]
mock_liebherr_client.get_device_state.return_value = DeviceState(
multi_zone_state = DeviceState(
device=device,
controls=[
TemperatureControl(
@@ -117,6 +121,9 @@ async def test_multi_zone_with_none_position(
),
],
)
mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: copy.deepcopy(
multi_zone_state
)
mock_config_entry.add_to_hass(hass)
await hass.config_entries.async_setup(mock_config_entry.entry_id)
@@ -170,7 +177,9 @@ async def test_sensor_update_failure(
assert state.state == STATE_UNAVAILABLE
# Simulate recovery
mock_liebherr_client.get_device_state.side_effect = None
mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: copy.deepcopy(
MOCK_DEVICE_STATE
)
freezer.tick(timedelta(seconds=61))
async_fire_time_changed(hass)
@@ -237,7 +246,7 @@ async def test_sensor_unavailable_when_control_missing(
assert state.state == "5"
# Device stops reporting controls (e.g., zone removed or API issue)
mock_liebherr_client.get_device_state.return_value = DeviceState(
mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: DeviceState(
device=MOCK_DEVICE, controls=[]
)

View File

@@ -0,0 +1,268 @@
"""Test the Liebherr switch platform."""
import copy
from datetime import timedelta
from typing import Any
from unittest.mock import MagicMock, patch
from freezegun.api import FrozenDateTimeFactory
from pyliebherrhomeapi import (
Device,
DeviceState,
DeviceType,
TemperatureControl,
TemperatureUnit,
ToggleControl,
ZonePosition,
)
from pyliebherrhomeapi.exceptions import LiebherrConnectionError
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.const import (
ATTR_ENTITY_ID,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_OFF,
STATE_UNAVAILABLE,
Platform,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import entity_registry as er
from .conftest import MOCK_DEVICE, MOCK_DEVICE_STATE
from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
@pytest.fixture
def platforms() -> list[Platform]:
    """Limit integration setup to the switch platform for these tests."""
    return [Platform.SWITCH]
@pytest.fixture(autouse=True)
def enable_all_entities(entity_registry_enabled_by_default: None) -> None:
    """Ensure every entity is enabled by default during these tests."""
@pytest.mark.usefixtures("init_integration")
async def test_switches(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Snapshot every switch entity created for the multi-zone test device."""
    await snapshot_platform(
        hass, entity_registry, snapshot, mock_config_entry.entry_id
    )
@pytest.mark.parametrize(
    ("entity_id", "service", "method", "kwargs"),
    [
        (
            "switch.test_fridge_top_zone_supercool",
            SERVICE_TURN_ON,
            "set_supercool",
            {"device_id": "test_device_id", "zone_id": 1, "value": True},
        ),
        (
            "switch.test_fridge_top_zone_supercool",
            SERVICE_TURN_OFF,
            "set_supercool",
            {"device_id": "test_device_id", "zone_id": 1, "value": False},
        ),
        (
            "switch.test_fridge_bottom_zone_superfrost",
            SERVICE_TURN_ON,
            "set_superfrost",
            {"device_id": "test_device_id", "zone_id": 2, "value": True},
        ),
        (
            "switch.test_fridge_party_mode",
            SERVICE_TURN_ON,
            "set_party_mode",
            {"device_id": "test_device_id", "value": True},
        ),
        (
            "switch.test_fridge_night_mode",
            SERVICE_TURN_OFF,
            "set_night_mode",
            {"device_id": "test_device_id", "value": False},
        ),
    ],
)
@pytest.mark.usefixtures("init_integration")
async def test_switch_service_calls(
    hass: HomeAssistant,
    mock_liebherr_client: MagicMock,
    entity_id: str,
    service: str,
    method: str,
    kwargs: dict[str, Any],
) -> None:
    """Verify turn on/off service calls hit the client and refresh the data."""
    calls_before = mock_liebherr_client.get_device_state.call_count

    await hass.services.async_call(
        SWITCH_DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True
    )

    client_method = getattr(mock_liebherr_client, method)
    client_method.assert_called_once_with(**kwargs)
    # A successful service call must trigger a coordinator refresh.
    assert mock_liebherr_client.get_device_state.call_count > calls_before
@pytest.mark.parametrize(
    ("entity_id", "method"),
    [
        ("switch.test_fridge_top_zone_supercool", "set_supercool"),
        ("switch.test_fridge_party_mode", "set_party_mode"),
    ],
)
@pytest.mark.usefixtures("init_integration")
async def test_switch_failure(
    hass: HomeAssistant,
    mock_liebherr_client: MagicMock,
    entity_id: str,
    method: str,
) -> None:
    """A connection error from the client surfaces as HomeAssistantError."""
    client_method = getattr(mock_liebherr_client, method)
    client_method.side_effect = LiebherrConnectionError("Connection failed")

    with pytest.raises(
        HomeAssistantError,
        match="An error occurred while communicating with the device: Connection failed",
    ):
        await hass.services.async_call(
            SWITCH_DOMAIN,
            SERVICE_TURN_ON,
            {ATTR_ENTITY_ID: entity_id},
            blocking=True,
        )
@pytest.mark.usefixtures("init_integration")
async def test_switch_update_failure(
    hass: HomeAssistant,
    mock_liebherr_client: MagicMock,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Switch goes unavailable on a failed poll and recovers afterwards."""
    entity_id = "switch.test_fridge_top_zone_supercool"

    assert (state := hass.states.get(entity_id)) is not None
    assert state.state == STATE_OFF

    # Break the next poll with a connection error.
    mock_liebherr_client.get_device_state.side_effect = LiebherrConnectionError(
        "Connection failed"
    )
    freezer.tick(timedelta(seconds=61))
    async_fire_time_changed(hass)
    await hass.async_block_till_done()

    assert (state := hass.states.get(entity_id)) is not None
    assert state.state == STATE_UNAVAILABLE

    # Restore a working poll; deep copies keep the shared fixture pristine.
    mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: copy.deepcopy(
        MOCK_DEVICE_STATE
    )
    freezer.tick(timedelta(seconds=61))
    async_fire_time_changed(hass)
    await hass.async_block_till_done()

    assert (state := hass.states.get(entity_id)) is not None
    assert state.state == STATE_OFF
@pytest.mark.usefixtures("init_integration")
async def test_switch_when_control_missing(
    hass: HomeAssistant,
    mock_liebherr_client: MagicMock,
    freezer: FrozenDateTimeFactory,
) -> None:
    """Switch becomes unavailable once its toggle control disappears."""
    entity_id = "switch.test_fridge_top_zone_supercool"

    assert (state := hass.states.get(entity_id)) is not None
    assert state.state == STATE_OFF

    # The next poll reports a device state without any controls at all.
    mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: DeviceState(
        device=MOCK_DEVICE, controls=[]
    )
    freezer.tick(timedelta(seconds=61))
    async_fire_time_changed(hass)
    await hass.async_block_till_done()

    assert (state := hass.states.get(entity_id)) is not None
    assert state.state == STATE_UNAVAILABLE
async def test_single_zone_switch(
    hass: HomeAssistant,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
    mock_liebherr_client: MagicMock,
    mock_config_entry: MockConfigEntry,
    platforms: list[Platform],
) -> None:
    """A single-zone device gets switch names without a zone suffix."""
    device = Device(
        device_id="single_zone_id",
        nickname="Single Zone Fridge",
        device_type=DeviceType.FRIDGE,
        device_name="K2601",
    )
    temperature = TemperatureControl(
        zone_id=1,
        zone_position=ZonePosition.TOP,
        name="Fridge",
        type="fridge",
        value=4,
        target=4,
        min=2,
        max=8,
        unit=TemperatureUnit.CELSIUS,
    )
    supercool = ToggleControl(
        name="supercool",
        type="ToggleControl",
        zone_id=1,
        zone_position=ZonePosition.TOP,
        value=False,
    )
    single_zone_state = DeviceState(device=device, controls=[temperature, supercool])

    mock_liebherr_client.get_devices.return_value = [device]
    # Hand out fresh copies so coordinator mutations never leak between polls.
    mock_liebherr_client.get_device_state.side_effect = lambda *a, **kw: copy.deepcopy(
        single_zone_state
    )

    mock_config_entry.add_to_hass(hass)
    with patch("homeassistant.components.liebherr.PLATFORMS", platforms):
        await hass.config_entries.async_setup(mock_config_entry.entry_id)
        await hass.async_block_till_done()

    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)

View File

@@ -39,11 +39,13 @@ def mock_nintendo_device() -> Device:
mock.limit_time = 120
mock.today_playing_time = 110
mock.today_time_remaining = 10
mock.bedtime_end = time(hour=7)
mock.bedtime_alarm = time(hour=19)
mock.timer_mode = DeviceTimerMode.DAILY
mock.extra_playing_time = 30
mock.add_extra_time.return_value = None
mock.set_bedtime_alarm.return_value = None
mock.set_bedtime_end_time.return_value = None
mock.update_max_daily_playtime.return_value = None
mock.set_timer_mode.return_value = None
mock.forced_termination_mode = True

View File

@@ -48,3 +48,52 @@
'state': '19:00:00',
})
# ---
# name: test_time[time.home_assistant_test_bedtime_end_time-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'time',
'entity_category': None,
'entity_id': 'time.home_assistant_test_bedtime_end_time',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': 'Bedtime end time',
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': 'Bedtime end time',
'platform': 'nintendo_parental_controls',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': <NintendoParentalControlsTime.BEDTIME_END_TIME: 'bedtime_end_time'>,
'unique_id': 'testdevid_bedtime_end_time',
'unit_of_measurement': None,
})
# ---
# name: test_time[time.home_assistant_test_bedtime_end_time-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Home Assistant Test Bedtime end time',
}),
'context': <ANY>,
'entity_id': 'time.home_assistant_test_bedtime_end_time',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': '07:00:00',
})
# ---

View File

@@ -38,11 +38,25 @@ async def test_time(
await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
@pytest.mark.parametrize(
("entity_id", "new_value", "called_function_name"),
[
("time.home_assistant_test_bedtime_alarm", "20:00:00", "set_bedtime_alarm"),
(
"time.home_assistant_test_bedtime_end_time",
"06:30:00",
"set_bedtime_end_time",
),
],
)
async def test_set_time(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_nintendo_client: AsyncMock,
mock_nintendo_device: AsyncMock,
entity_id: str,
new_value: str,
called_function_name: str,
) -> None:
"""Test time platform service validation errors."""
with patch(
@@ -53,21 +67,44 @@ async def test_set_time(
await hass.services.async_call(
TIME_DOMAIN,
SERVICE_SET_VALUE,
service_data={ATTR_TIME: "20:00:00"},
target={ATTR_ENTITY_ID: "time.home_assistant_test_bedtime_alarm"},
service_data={ATTR_TIME: new_value},
target={ATTR_ENTITY_ID: entity_id},
blocking=True,
)
assert len(mock_nintendo_device.set_bedtime_alarm.mock_calls) == 1
assert len(getattr(mock_nintendo_device, called_function_name).mock_calls) == 1
@pytest.mark.parametrize(
("entity_id", "new_value", "translation_key", "called_function_name"),
[
(
"time.home_assistant_test_bedtime_alarm",
"03:00:00",
"bedtime_alarm_out_of_range",
"set_bedtime_alarm",
),
(
"time.home_assistant_test_bedtime_end_time",
"10:00:00",
"bedtime_end_time_out_of_range",
"set_bedtime_end_time",
),
],
)
async def test_set_time_service_exceptions(
hass: HomeAssistant,
mock_config_entry: MockConfigEntry,
mock_nintendo_client: AsyncMock,
mock_nintendo_device: AsyncMock,
entity_id: str,
new_value: str,
translation_key: str,
called_function_name: str,
) -> None:
"""Test time platform service validation errors."""
mock_nintendo_device.set_bedtime_alarm.side_effect = BedtimeOutOfRangeError(None)
getattr(
mock_nintendo_device, called_function_name
).side_effect = BedtimeOutOfRangeError(None)
with patch(
"homeassistant.components.nintendo_parental_controls._PLATFORMS",
[Platform.TIME],
@@ -77,9 +114,9 @@ async def test_set_time_service_exceptions(
await hass.services.async_call(
TIME_DOMAIN,
SERVICE_SET_VALUE,
service_data={ATTR_TIME: "01:00:00"},
target={ATTR_ENTITY_ID: "time.home_assistant_test_bedtime_alarm"},
service_data={ATTR_TIME: new_value},
target={ATTR_ENTITY_ID: entity_id},
blocking=True,
)
assert len(mock_nintendo_device.set_bedtime_alarm.mock_calls) == 1
assert err.value.translation_key == "bedtime_alarm_out_of_range"
assert len(getattr(mock_nintendo_device, called_function_name).mock_calls) == 1
assert err.value.translation_key == translation_key

View File

@@ -1,6 +1,7 @@
"""Tests config_flow."""
from collections.abc import Awaitable, Callable
from pathlib import Path
from tempfile import NamedTemporaryFile
from unittest.mock import patch
@@ -23,6 +24,7 @@ from homeassistant.components.sftp_storage.const import (
from homeassistant.config_entries import SOURCE_USER
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers.storage import STORAGE_DIR
from .conftest import USER_INPUT, SSHClientConnectionMock
@@ -32,8 +34,10 @@ type ComponentSetup = Callable[[], Awaitable[None]]
@pytest.fixture
def mock_process_uploaded_file():
def mock_process_uploaded_file(hass: HomeAssistant):
"""Mocks ability to process uploaded private key."""
# Ensure .storage directory exists, as it would in a real HA instance
Path(hass.config.path(STORAGE_DIR)).mkdir(parents=True, exist_ok=True)
with (
patch(
"homeassistant.components.sftp_storage.config_flow.process_uploaded_file"

View File

@@ -1,7 +1,7 @@
"""Test common."""
import datetime as dt
from unittest.mock import AsyncMock
from unittest.mock import AsyncMock, MagicMock
CONSUMPTION_DATA_1 = [
{
@@ -49,8 +49,24 @@ def mock_get_homes(only_active=True):
tibber_home.has_active_subscription = True
tibber_home.has_real_time_consumption = False
tibber_home.country = "NO"
tibber_home.last_cons_data_timestamp = dt.datetime(2016, 1, 1, 12, 44, 57)
tibber_home.last_data_timestamp = dt.datetime(2016, 1, 1, 12, 48, 57)
tibber_home.last_cons_data_timestamp = dt.datetime(
2016, 1, 1, 12, 44, 57, tzinfo=dt.UTC
)
tibber_home.last_data_timestamp = dt.datetime(2016, 1, 1, 12, 48, 57, tzinfo=dt.UTC)
tibber_home.price_unit = "NOK/kWh"
tibber_home.current_price_data = MagicMock(
return_value=(0.0, None, None),
)
tibber_home.current_attributes = MagicMock(
return_value={
"max_price": 0.0,
"avg_price": 0.0,
"min_price": 0.0,
"off_peak_1": 0.0,
"peak": 0.0,
"off_peak_2": 0.0,
},
)
def get_historic_data(n_data, resolution="HOURLY", production=False):
return PRODUCTION_DATA_1 if production else CONSUMPTION_DATA_1

View File

@@ -36,6 +36,8 @@ async def test_data_api_runtime_creates_client(hass: HomeAssistant) -> None:
runtime = TibberRuntimeData(
session=session,
data_api_coordinator=MagicMock(),
data_coordinator=MagicMock(),
)
with patch("homeassistant.components.tibber.tibber.Tibber") as mock_client_cls:
@@ -72,6 +74,8 @@ async def test_data_api_runtime_missing_token_raises(hass: HomeAssistant) -> Non
runtime = TibberRuntimeData(
session=session,
data_api_coordinator=MagicMock(),
data_coordinator=MagicMock(),
)
with pytest.raises(ConfigEntryAuthFailed):

View File

@@ -1,6 +1,6 @@
"""Test adding external statistics from Tibber."""
from unittest.mock import AsyncMock
from unittest.mock import AsyncMock, MagicMock
from homeassistant.components.recorder import Recorder
from homeassistant.components.recorder.statistics import statistics_during_period
@@ -24,7 +24,11 @@ async def test_async_setup_entry(
tibber_connection.fetch_production_data_active_homes.return_value = None
tibber_connection.get_homes = mock_get_homes
coordinator = TibberDataCoordinator(hass, config_entry, tibber_connection)
runtime_data = MagicMock()
runtime_data.async_get_client = AsyncMock(return_value=tibber_connection)
config_entry.runtime_data = runtime_data
coordinator = TibberDataCoordinator(hass, config_entry)
await coordinator._async_update_data()
await async_wait_recording_done(hass)

View File

@@ -4,7 +4,7 @@ from collections.abc import Generator
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from pyvlx import Blind, Light, OnOffLight, Scene, Window
from pyvlx import Blind, Light, OnOffLight, OnOffSwitch, Scene, Window
from homeassistant.components.velux import DOMAIN
from homeassistant.const import CONF_HOST, CONF_MAC, CONF_PASSWORD, Platform
@@ -114,6 +114,19 @@ def mock_onoff_light() -> AsyncMock:
return light
# an on/off switch
@pytest.fixture
def mock_onoff_switch() -> AsyncMock:
"""Create a mock Velux on/off switch."""
switch = AsyncMock(spec=OnOffSwitch, autospec=True)
switch.name = "Test On Off Switch"
switch.serial_number = "0817"
switch.is_on.return_value = False
switch.is_off.return_value = True
switch.pyvlx = MagicMock()
return switch
# fixture to create all other cover types via parameterization
@pytest.fixture
def mock_cover_type(request: pytest.FixtureRequest) -> AsyncMock:
@@ -133,6 +146,7 @@ def mock_pyvlx(
mock_scene: AsyncMock,
mock_light: AsyncMock,
mock_onoff_light: AsyncMock,
mock_onoff_switch: AsyncMock,
mock_window: AsyncMock,
mock_blind: AsyncMock,
request: pytest.FixtureRequest,
@@ -152,6 +166,7 @@ def mock_pyvlx(
pyvlx.nodes = [
mock_light,
mock_onoff_light,
mock_onoff_switch,
mock_blind,
mock_window,
mock_cover_type,

View File

@@ -0,0 +1,50 @@
# serializer version: 1
# name: test_switch_setup[mock_onoff_switch][switch.test_on_off_switch-entry]
EntityRegistryEntrySnapshot({
'aliases': set({
}),
'area_id': None,
'capabilities': None,
'config_entry_id': <ANY>,
'config_subentry_id': <ANY>,
'device_class': None,
'device_id': <ANY>,
'disabled_by': None,
'domain': 'switch',
'entity_category': None,
'entity_id': 'switch.test_on_off_switch',
'has_entity_name': True,
'hidden_by': None,
'icon': None,
'id': <ANY>,
'labels': set({
}),
'name': None,
'object_id_base': None,
'options': dict({
}),
'original_device_class': None,
'original_icon': None,
'original_name': None,
'platform': 'velux',
'previous_unique_id': None,
'suggested_object_id': None,
'supported_features': 0,
'translation_key': None,
'unique_id': '0817',
'unit_of_measurement': None,
})
# ---
# name: test_switch_setup[mock_onoff_switch][switch.test_on_off_switch-state]
StateSnapshot({
'attributes': ReadOnlyDict({
'friendly_name': 'Test On Off Switch',
}),
'context': <ANY>,
'entity_id': 'switch.test_on_off_switch',
'last_changed': <ANY>,
'last_reported': <ANY>,
'last_updated': <ANY>,
'state': 'off',
})
# ---

View File

@@ -0,0 +1,133 @@
"""Test Velux switch entities."""
from unittest.mock import AsyncMock
import pytest
from pyvlx import PyVLXException
from homeassistant.components.switch import (
DOMAIN as SWITCH_DOMAIN,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
)
from homeassistant.const import STATE_OFF, STATE_ON, Platform
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import device_registry as dr, entity_registry as er
from . import update_callback_entity
from tests.common import MockConfigEntry, SnapshotAssertion, snapshot_platform
# Apply the setup_integration fixture to every test in this module.
pytestmark = pytest.mark.usefixtures("setup_integration")
@pytest.fixture
def platform() -> Platform:
    """Restrict the integration setup to the switch platform."""
    return Platform.SWITCH
@pytest.mark.parametrize("mock_pyvlx", ["mock_onoff_switch"], indirect=True)
async def test_switch_setup(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    entity_registry: er.EntityRegistry,
    mock_pyvlx: AsyncMock,
    snapshot: SnapshotAssertion,
) -> None:
    """Validate registry metadata and state of switch entities via snapshot."""
    await snapshot_platform(
        hass, entity_registry, snapshot, mock_config_entry.entry_id
    )
async def test_switch_device_association(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    device_registry: dr.DeviceRegistry,
    mock_onoff_switch: AsyncMock,
) -> None:
    """The switch entity is attached to the device created for its node."""
    object_id = mock_onoff_switch.name.lower().replace(" ", "_")
    entity_entry = entity_registry.async_get(f"switch.{object_id}")
    assert entity_entry is not None
    assert entity_entry.device_id is not None

    device_entry = device_registry.async_get(entity_entry.device_id)
    assert device_entry is not None
    assert ("velux", mock_onoff_switch.serial_number) in device_entry.identifiers
    assert device_entry.name == mock_onoff_switch.name
async def test_switch_is_on(hass: HomeAssistant, mock_onoff_switch: AsyncMock) -> None:
    """The entity tracks the node's on/off state through update callbacks."""
    entity_id = f"switch.{mock_onoff_switch.name.lower().replace(' ', '_')}"

    # The fixture starts with the node reporting off.
    assert (state := hass.states.get(entity_id)) is not None
    assert state.state == STATE_OFF

    # Flip the node to on and push the change through the update callback.
    mock_onoff_switch.is_on.return_value = True
    mock_onoff_switch.is_off.return_value = False
    await update_callback_entity(hass, mock_onoff_switch)

    assert (state := hass.states.get(entity_id)) is not None
    assert state.state == STATE_ON
async def test_switch_turn_on_off(
    hass: HomeAssistant, mock_onoff_switch: AsyncMock
) -> None:
    """Turning the entity on/off awaits the matching node command once."""
    entity_id = f"switch.{mock_onoff_switch.name.lower().replace(' ', '_')}"

    for service, node_command in (
        (SERVICE_TURN_ON, mock_onoff_switch.set_on),
        (SERVICE_TURN_OFF, mock_onoff_switch.set_off),
    ):
        await hass.services.async_call(
            SWITCH_DOMAIN,
            service,
            {"entity_id": entity_id},
            blocking=True,
        )
        node_command.assert_awaited_once()
@pytest.mark.parametrize("mock_pyvlx", ["mock_onoff_switch"], indirect=True)
async def test_switch_error_handling(
    hass: HomeAssistant, mock_onoff_switch: AsyncMock
) -> None:
    """PyVLX failures while switching are raised as HomeAssistantError."""
    entity_id = f"switch.{mock_onoff_switch.name.lower().replace(' ', '_')}"
    mock_onoff_switch.set_on.side_effect = PyVLXException("Connection lost")
    mock_onoff_switch.set_off.side_effect = PyVLXException("Connection lost")

    for service in (SERVICE_TURN_ON, SERVICE_TURN_OFF):
        with pytest.raises(HomeAssistantError):
            await hass.services.async_call(
                SWITCH_DOMAIN,
                service,
                {"entity_id": entity_id},
                blocking=True,
            )

View File

@@ -776,6 +776,18 @@ _CONVERTED_VALUE: dict[
60.1378,
CONCENTRATION_PARTS_PER_BILLION,
),
(
1,
CONCENTRATION_PARTS_PER_MILLION,
1995.417,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
),
(
120,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
0.0601378,
CONCENTRATION_PARTS_PER_MILLION,
),
],
PowerConverter: [
(10, UnitOfPower.KILO_WATT, 10000, UnitOfPower.WATT),